diff --git a/.changelog/unreleased/testing/694-dont-spawn-internal-account-vps.md b/.changelog/unreleased/testing/694-dont-spawn-internal-account-vps.md new file mode 100644 index 0000000000..664438b982 --- /dev/null +++ b/.changelog/unreleased/testing/694-dont-spawn-internal-account-vps.md @@ -0,0 +1,2 @@ +- Don't fake a wasm VP for internal addresses in tx tests + ([#694](https://github.com/anoma/namada/pull/694)) \ No newline at end of file diff --git a/.changelog/v0.8.0/bug-fixes/1099-wasm-reading.md b/.changelog/v0.8.0/bug-fixes/1099-wasm-reading.md new file mode 100644 index 0000000000..2e5cce09f3 --- /dev/null +++ b/.changelog/v0.8.0/bug-fixes/1099-wasm-reading.md @@ -0,0 +1,2 @@ +- Make read_wasm return an error instead of exiting in InitChain + ([#1099](https://github.com/anoma/anoma/pull/1099)) \ No newline at end of file diff --git a/.changelog/unreleased/bug-fixes/1249-fix-shell-last-epoch.md b/.changelog/v0.8.0/bug-fixes/1249-fix-shell-last-epoch.md similarity index 100% rename from .changelog/unreleased/bug-fixes/1249-fix-shell-last-epoch.md rename to .changelog/v0.8.0/bug-fixes/1249-fix-shell-last-epoch.md diff --git a/.changelog/v0.8.0/bug-fixes/279-new-merkle-tree.md b/.changelog/v0.8.0/bug-fixes/279-new-merkle-tree.md new file mode 100644 index 0000000000..e9c2b7c688 --- /dev/null +++ b/.changelog/v0.8.0/bug-fixes/279-new-merkle-tree.md @@ -0,0 +1,3 @@ +- Switch to an alternative sparse merkle tree implementation for IBC sub-tree + to be able to support proofs compatible with the current version of ICS23 + ([#279](https://github.com/anoma/namada/pull/279)) \ No newline at end of file diff --git a/.changelog/v0.8.0/bug-fixes/326-fix-validator-raw-hash.md b/.changelog/v0.8.0/bug-fixes/326-fix-validator-raw-hash.md new file mode 100644 index 0000000000..bf8ef22579 --- /dev/null +++ b/.changelog/v0.8.0/bug-fixes/326-fix-validator-raw-hash.md @@ -0,0 +1,2 @@ +- Fixed validator raw hash corresponding to validator address in Tendermint + ([#326](https://github.com/anoma/namada/pull/326)) \ No newline at end of file diff --git a/.changelog/unreleased/bug-fixes/384-fix-new-epoch-start-height.md b/.changelog/v0.8.0/bug-fixes/384-fix-new-epoch-start-height.md similarity index 100% rename from .changelog/unreleased/bug-fixes/384-fix-new-epoch-start-height.md rename to .changelog/v0.8.0/bug-fixes/384-fix-new-epoch-start-height.md diff --git a/.changelog/unreleased/bug-fixes/419-fix-rustdoc.md b/.changelog/v0.8.0/bug-fixes/419-fix-rustdoc.md similarity index 100% rename from .changelog/unreleased/bug-fixes/419-fix-rustdoc.md rename to .changelog/v0.8.0/bug-fixes/419-fix-rustdoc.md diff --git a/.changelog/v0.8.0/bug-fixes/594-fix-pred-epoch-height.md b/.changelog/v0.8.0/bug-fixes/594-fix-pred-epoch-height.md new file mode 100644 index 0000000000..03b7229bcc --- /dev/null +++ b/.changelog/v0.8.0/bug-fixes/594-fix-pred-epoch-height.md @@ -0,0 +1,2 @@ +- Fix the value recorded for epoch start block height.
+ ([#594](https://github.com/anoma/namada/pull/594)) \ No newline at end of file diff --git a/.changelog/v0.8.0/features/132-multitoken-transfer.md b/.changelog/v0.8.0/features/132-multitoken-transfer.md new file mode 100644 index 0000000000..2b913d7d19 --- /dev/null +++ b/.changelog/v0.8.0/features/132-multitoken-transfer.md @@ -0,0 +1,2 @@ +- Added multitoken transfer and query for bridges + ([#132](https://github.com/anoma/namada/issues/132)) \ No newline at end of file diff --git a/.changelog/v0.8.0/features/503-lazy-vec-and-map.md b/.changelog/v0.8.0/features/503-lazy-vec-and-map.md new file mode 100644 index 0000000000..d29ee5fd9c --- /dev/null +++ b/.changelog/v0.8.0/features/503-lazy-vec-and-map.md @@ -0,0 +1,2 @@ +- Added lazy vector and map data structures for ledger storage + ([#503](https://github.com/anoma/namada/pull/503)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1093-unify-native-and-wasm-vp.md b/.changelog/v0.8.0/improvements/1093-unify-native-and-wasm-vp.md new file mode 100644 index 0000000000..e39308413f --- /dev/null +++ b/.changelog/v0.8.0/improvements/1093-unify-native-and-wasm-vp.md @@ -0,0 +1,3 @@ +- Added WASM transaction and validity predicate `Ctx` with methods for host + environment functions to unify the interface of native VPs and WASM VPs under + `trait VpEnv` ([#1093](https://github.com/anoma/anoma/pull/1093)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1138-change-wallet-bihashmap.md b/.changelog/v0.8.0/improvements/1138-change-wallet-bihashmap.md new file mode 100644 index 0000000000..d13b82e697 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1138-change-wallet-bihashmap.md @@ -0,0 +1,4 @@ +- Allows simple retrieval of aliases from addresses in the wallet without + the need for multiple hashmaps.
This is the first step to improving the + UI if one wants to show aliases when fetching addresses from anoma wallet + ([#1138](https://github.com/anoma/anoma/pull/1138)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1148-allow-absolute-wasm-dir.md b/.changelog/v0.8.0/improvements/1148-allow-absolute-wasm-dir.md new file mode 100644 index 0000000000..1e267d6faf --- /dev/null +++ b/.changelog/v0.8.0/improvements/1148-allow-absolute-wasm-dir.md @@ -0,0 +1,2 @@ +- Allow specifying an absolute path for the wasm directory + ([#1148](https://github.com/anoma/anoma/issues/1148)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1159-anomac-download-wasms.md b/.changelog/v0.8.0/improvements/1159-anomac-download-wasms.md new file mode 100644 index 0000000000..20ae41a073 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1159-anomac-download-wasms.md @@ -0,0 +1,2 @@ +- Add functionality to anomac to download wasms for a given chain + ([#1159](https://github.com/anoma/anoma/pull/1159)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1161-anomaw-address-find.md b/.changelog/v0.8.0/improvements/1161-anomaw-address-find.md new file mode 100644 index 0000000000..4a2903f6f9 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1161-anomaw-address-find.md @@ -0,0 +1,2 @@ +- Improved CLI experience for 'anomaw address find' + ([#1161](https://github.com/anoma/anoma/pull/1161)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1168-pbkdf-iterations.md b/.changelog/v0.8.0/improvements/1168-pbkdf-iterations.md new file mode 100644 index 0000000000..417e0f8af8 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1168-pbkdf-iterations.md @@ -0,0 +1,2 @@ +- Wallet: Increase the number of iterations used for keys encryption to the + recommended value. ([#1168](https://github.com/anoma/anoma/issues/1168)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1176-genesis-config-error.md b/.changelog/v0.8.0/improvements/1176-genesis-config-error.md new file mode 100644 index 0000000000..3e7f9eb996 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1176-genesis-config-error.md @@ -0,0 +1,2 @@ +- Improve the error message that is displayed when anoma binaries are run without + having joined a chain ([#1176](https://github.com/anoma/anoma/pull/1176)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1231-refactor-ledger-run-with-cleanup.md b/.changelog/v0.8.0/improvements/1231-refactor-ledger-run-with-cleanup.md new file mode 100644 index 0000000000..6d0ee99747 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1231-refactor-ledger-run-with-cleanup.md @@ -0,0 +1,2 @@ +- Refactored ledger startup code + ([#1231](https://github.com/anoma/anoma/pull/1231)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/1248-remove-evidence-params.md b/.changelog/v0.8.0/improvements/1248-remove-evidence-params.md new file mode 100644 index 0000000000..97297a93e1 --- /dev/null +++ b/.changelog/v0.8.0/improvements/1248-remove-evidence-params.md @@ -0,0 +1,3 @@ +- Replace Tendermint consensus evidence parameters with + application level evidence filter for outdated evidence. 
+ ([#1248](https://github.com/anoma/anoma/pull/1248)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/240-host-env-vp-write-check.md b/.changelog/v0.8.0/improvements/240-host-env-vp-write-check.md new file mode 100644 index 0000000000..ca42bc57ef --- /dev/null +++ b/.changelog/v0.8.0/improvements/240-host-env-vp-write-check.md @@ -0,0 +1,2 @@ +- Validate WASM code of validity predicates written by transactions. + ([#240](https://github.com/anoma/anoma/pull/240)) diff --git a/.changelog/v0.8.0/improvements/318-refactor-pos-vp.md b/.changelog/v0.8.0/improvements/318-refactor-pos-vp.md new file mode 100644 index 0000000000..5ed78c3cc6 --- /dev/null +++ b/.changelog/v0.8.0/improvements/318-refactor-pos-vp.md @@ -0,0 +1 @@ +- Refactored PoS VP logic ([#318](https://github.com/anoma/namada/pull/318)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/324-common-read-storage-trait.md b/.changelog/v0.8.0/improvements/324-common-read-storage-trait.md new file mode 100644 index 0000000000..b8e9defe95 --- /dev/null +++ b/.changelog/v0.8.0/improvements/324-common-read-storage-trait.md @@ -0,0 +1,3 @@ +- Added a StorageRead trait for a common interface for VPs prior and posterior + state, transactions and direct storage access for protocol and RPC handlers + ([#324](https://github.com/anoma/namada/pull/324)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/331-common-write-storage-trait.md b/.changelog/v0.8.0/improvements/331-common-write-storage-trait.md new file mode 100644 index 0000000000..2e3e605197 --- /dev/null +++ b/.changelog/v0.8.0/improvements/331-common-write-storage-trait.md @@ -0,0 +1,2 @@ +- Added a StorageWrite trait for a common interface for transactions and direct + storage access for protocol ([#331](https://github.com/anoma/namada/pull/331)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/334-refactor-storage-read-write.md b/.changelog/v0.8.0/improvements/334-refactor-storage-read-write.md new file mode 100644 index 0000000000..0642596268 --- /dev/null +++ b/.changelog/v0.8.0/improvements/334-refactor-storage-read-write.md @@ -0,0 +1,2 @@ +- Re-use encoding/decoding storage write/read and handle any errors + ([#334](https://github.com/anoma/namada/pull/334)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/335-refactor-storage-prefix-iter.md b/.changelog/v0.8.0/improvements/335-refactor-storage-prefix-iter.md new file mode 100644 index 0000000000..d51f6c72f0 --- /dev/null +++ b/.changelog/v0.8.0/improvements/335-refactor-storage-prefix-iter.md @@ -0,0 +1,3 @@ +- Added a simpler prefix iterator API that returns `std::iter::Iterator` with + the storage keys parsed and a variant that also decodes stored values with + Borsh ([#335](https://github.com/anoma/namada/pull/335)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/337-wasm-cargo-target-dir.md b/.changelog/v0.8.0/improvements/337-wasm-cargo-target-dir.md new file mode 100644 index 0000000000..4cc10d7292 --- /dev/null +++ b/.changelog/v0.8.0/improvements/337-wasm-cargo-target-dir.md @@ -0,0 +1,2 @@ +- Handles the case where a custom `$CARGO_TARGET_DIR` is set during WASM build + ([#337](https://github.com/anoma/anoma/pull/337)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/380-vp-env-pre-post-via-storage-api.md b/.changelog/v0.8.0/improvements/380-vp-env-pre-post-via-storage-api.md new file mode 100644 index 0000000000..655cdf256a --- /dev/null +++ 
b/.changelog/v0.8.0/improvements/380-vp-env-pre-post-via-storage-api.md @@ -0,0 +1,3 @@ +- Added `pre/post` methods into `trait VpEnv` that return objects implementing + `trait StorageRead` for re-use of library code written on top of `StorageRead` + inside validity predicates. ([#380](https://github.com/anoma/namada/pull/380)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/409-sorted-prefix-iter.md b/.changelog/v0.8.0/improvements/409-sorted-prefix-iter.md new file mode 100644 index 0000000000..2f95505960 --- /dev/null +++ b/.changelog/v0.8.0/improvements/409-sorted-prefix-iter.md @@ -0,0 +1,3 @@ +- Fix order of prefix iterator to be sorted by storage + keys and add support for a reverse order prefix iterator. + ([#409](https://github.com/anoma/namada/issues/409)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/465-vp-tx-env-conrete-error.md b/.changelog/v0.8.0/improvements/465-vp-tx-env-conrete-error.md new file mode 100644 index 0000000000..e40ff76a17 --- /dev/null +++ b/.changelog/v0.8.0/improvements/465-vp-tx-env-conrete-error.md @@ -0,0 +1,2 @@ +- Re-use `storage_api::Error` type that supports wrapping custom error in `VpEnv` and `TxEnv` traits. + ([#465](https://github.com/anoma/namada/pull/465)) diff --git a/.changelog/v0.8.0/improvements/467-governance-fixes.md b/.changelog/v0.8.0/improvements/467-governance-fixes.md new file mode 100644 index 0000000000..441277d632 --- /dev/null +++ b/.changelog/v0.8.0/improvements/467-governance-fixes.md @@ -0,0 +1,2 @@ +- Fixed governance parameters, tally, tx whitelist and renamed treasury + ([#467](https://github.com/anoma/namada/issues/467)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/518-mdbook-admonish-for-specs.md b/.changelog/v0.8.0/improvements/518-mdbook-admonish-for-specs.md new file mode 100644 index 0000000000..6fa947fbb5 --- /dev/null +++ b/.changelog/v0.8.0/improvements/518-mdbook-admonish-for-specs.md @@ -0,0 +1,2 @@ +- Enable mdbook-admonish for the specs + ([#518](https://github.com/anoma/namada/issues/518)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/547-multistore-refactor.md b/.changelog/v0.8.0/improvements/547-multistore-refactor.md new file mode 100644 index 0000000000..2aeca6ccfa --- /dev/null +++ b/.changelog/v0.8.0/improvements/547-multistore-refactor.md @@ -0,0 +1,2 @@ +- Extend Merkle tree storage to support multiple Merkle trees with a uniform + interface. 
([#547](https://github.com/anoma/namada/pull/547)) \ No newline at end of file diff --git a/.changelog/v0.8.0/improvements/605-fix-transaction-gas-exceededed-typo.md b/.changelog/v0.8.0/improvements/605-fix-transaction-gas-exceededed-typo.md new file mode 100644 index 0000000000..46134ffe7a --- /dev/null +++ b/.changelog/v0.8.0/improvements/605-fix-transaction-gas-exceededed-typo.md @@ -0,0 +1 @@ +- Fix a typo in an error ([#605](https://github.com/anoma/namada/issues/605)) \ No newline at end of file diff --git a/.changelog/v0.8.0/miscellaneous/1096-wasm-workspace.md b/.changelog/v0.8.0/miscellaneous/1096-wasm-workspace.md new file mode 100644 index 0000000000..a15f343025 --- /dev/null +++ b/.changelog/v0.8.0/miscellaneous/1096-wasm-workspace.md @@ -0,0 +1,2 @@ +- Use a cargo workspace for some of our wasm crates + ([#1096](https://github.com/anoma/anoma/pull/1096)) \ No newline at end of file diff --git a/.changelog/v0.8.0/miscellaneous/1243-debug-wasm-build.md b/.changelog/v0.8.0/miscellaneous/1243-debug-wasm-build.md new file mode 100644 index 0000000000..2acf6479aa --- /dev/null +++ b/.changelog/v0.8.0/miscellaneous/1243-debug-wasm-build.md @@ -0,0 +1,2 @@ +- Added a make recipe to build WASM in debug mode with `make debug-wasm-scripts` + ([#1243](https://github.com/anoma/anoma/pull/1243)) \ No newline at end of file diff --git a/.changelog/v0.8.0/miscellaneous/452-update-rocksdb.md b/.changelog/v0.8.0/miscellaneous/452-update-rocksdb.md new file mode 100644 index 0000000000..83ad52ce87 --- /dev/null +++ b/.changelog/v0.8.0/miscellaneous/452-update-rocksdb.md @@ -0,0 +1,2 @@ +- Updated RocksDB dependency to 0.19.0 and enabled its jemalloc feature. + ([#452](https://github.com/anoma/namada/pull/452)) \ No newline at end of file diff --git a/.changelog/v0.8.0/miscellaneous/493-remove-intent-gossiper.md b/.changelog/v0.8.0/miscellaneous/493-remove-intent-gossiper.md new file mode 100644 index 0000000000..543edeb6aa --- /dev/null +++ b/.changelog/v0.8.0/miscellaneous/493-remove-intent-gossiper.md @@ -0,0 +1,2 @@ +- Removed intent gossiper and matchmaker code + ([#493](https://github.com/anoma/namada/issues/493)) \ No newline at end of file diff --git a/.changelog/v0.8.0/summary.md b/.changelog/v0.8.0/summary.md new file mode 100644 index 0000000000..b23b8369d9 --- /dev/null +++ b/.changelog/v0.8.0/summary.md @@ -0,0 +1 @@ +Namada 0.8.0 is a regular minor release. diff --git a/.changelog/v0.8.0/testing/1221-e2e-keep-temp-fix.md b/.changelog/v0.8.0/testing/1221-e2e-keep-temp-fix.md new file mode 100644 index 0000000000..3c61ceb518 --- /dev/null +++ b/.changelog/v0.8.0/testing/1221-e2e-keep-temp-fix.md @@ -0,0 +1,2 @@ +- Fixed ANOMA_E2E_KEEP_TEMP=true to work in e2e::setup::network + ([#1221](https://github.com/anoma/anoma/issues/1221)) \ No newline at end of file diff --git a/.changelog/v0.8.0/testing/462-pos-tx-tests.md b/.changelog/v0.8.0/testing/462-pos-tx-tests.md new file mode 100644 index 0000000000..09bacbc5f0 --- /dev/null +++ b/.changelog/v0.8.0/testing/462-pos-tx-tests.md @@ -0,0 +1,2 @@ +- Test PoS transaction for bonding, unbonding and withdrawal. Fixed an issue + found on unbonding.
([#462](https://github.com/anoma/anoma/issues/462)) \ No newline at end of file diff --git a/.changelog/v0.8.0/testing/590-fix-tx-bond-test-condition.md b/.changelog/v0.8.0/testing/590-fix-tx-bond-test-condition.md new file mode 100644 index 0000000000..80435089b2 --- /dev/null +++ b/.changelog/v0.8.0/testing/590-fix-tx-bond-test-condition.md @@ -0,0 +1,2 @@ +- Fix a condition in tx_bond test that causes a false negative result + ([#590](https://github.com/anoma/namada/pull/590)) \ No newline at end of file diff --git a/.changelog/v0.8.1/improvements/510-shims-merge.md b/.changelog/v0.8.1/improvements/510-shims-merge.md new file mode 100644 index 0000000000..1268bf87e4 --- /dev/null +++ b/.changelog/v0.8.1/improvements/510-shims-merge.md @@ -0,0 +1,2 @@ +- Shim ABCI++ methods for tendermint + ([#510](https://github.com/anoma/namada/pull/510)) \ No newline at end of file diff --git a/.changelog/v0.8.1/summary.md b/.changelog/v0.8.1/summary.md new file mode 100644 index 0000000000..cc495e5584 --- /dev/null +++ b/.changelog/v0.8.1/summary.md @@ -0,0 +1,2 @@ +Namada 0.8.1 is a point release focused on standardizing Tendermint +compatibility. diff --git a/.github/workflows/automation.yml b/.github/workflows/automation.yml index 2bcd22fa6a..58ea874ee0 100644 --- a/.github/workflows/automation.yml +++ b/.github/workflows/automation.yml @@ -77,6 +77,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_READ_ORG_TOKEN: ${{ secrets.GT_READ_ORG }} GITHUB_DISPATCH_TOKEN: ${{ secrets.GT_DISPATCH }} + SLACK_DEVNET_SECRET: ${{ secrets.SLACK_DEVNET_SECRET }} BINARIES_COMMIT_SHA: ${{ steps.comment-branch.outputs.head_sha }} - name: Upload load tester logs if: ${{ matrix.make.logs == 'true' && steps.check.outputs.triggered == 'true' }} diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index f8d07c94f7..1c4cbd3412 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -31,7 +31,7 @@ jobs: timeout-minutes: 30 runs-on: ${{ matrix.os }} container: - image: ghcr.io/anoma/namada:wasm-0.6.1 + image: ghcr.io/anoma/namada:wasm-0.8.0 strategy: fail-fast: false matrix: @@ -117,7 +117,7 @@ jobs: cache_key: anoma cache_version: v1 wait_for: anoma-release (ubuntu-latest, ABCI Release build, anoma-e2e-release, v1) - tendermint_artifact: tendermint-unreleased-559fb33ff9b27503ce7ac1c7d8589fe1d8b3e900 + tendermint_artifact: tendermint-unreleased-ad825dcadbd4b98c3f91ce5a711e4fb36a69c377 env: CARGO_INCREMENTAL: 0 diff --git a/.github/workflows/build-tendermint.yml b/.github/workflows/build-tendermint.yml index 91a5259dac..7914a39a49 100644 --- a/.github/workflows/build-tendermint.yml +++ b/.github/workflows/build-tendermint.yml @@ -21,9 +21,6 @@ jobs: matrix: os: [ubuntu-latest] make: - - name: tendermint-unreleased - repository: heliaxdev/tendermint - tendermint_version: 559fb33ff9b27503ce7ac1c7d8589fe1d8b3e900 - name: tendermint-unreleased repository: heliaxdev/tendermint tendermint_version: ad825dcadbd4b98c3f91ce5a711e4fb36a69c377 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 3f5e5e5159..0ba9f39b98 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -117,6 +117,13 @@ jobs: cd ${{ matrix.make.folder }} && mdbook-admonish install - name: ${{ matrix.make.name }} run: ${{ matrix.make.command }} + - name: Zip doc folder + run: tar -cvf ${{ matrix.make.bucket }}.tar ${{ matrix.make.folder }}/book + - name: Upload rendered docs + uses: actions/upload-artifact@v3 + with: + name: ${{ 
matrix.make.bucket }}-${{ github.sha }}.tar + path: ${{ matrix.make.bucket }}.tar - name: Publish docs if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: aws s3 sync ${{ matrix.make.folder }}/book/html/ s3://${{ matrix.make.bucket }} --region eu-west-1 --delete diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b926d268d7..5cbc1e9207 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -35,6 +35,7 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 + - run: git fetch --tags --force origin # WA: https://github.com/actions/checkout/issues/882 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v1 with: diff --git a/.gitignore b/.gitignore index 5f835e6405..4718c258b2 100644 --- a/.gitignore +++ b/.gitignore @@ -26,4 +26,4 @@ target/ wasm/*.wasm # app version string file -/apps/version.rs +/apps/version.rs \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 12656661af..b7095801d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,118 @@ # CHANGELOG +## v0.8.1 + +Namada 0.8.1 is a point release focused on standardizing Tendermint +compatibility. + +### IMPROVEMENTS + +- Shim ABCI++ methods for tendermint + ([#510](https://github.com/anoma/namada/pull/510)) + +## v0.8.0 + +Namada 0.8.0 is a regular minor release. + +### BUG FIXES + +- Switch to an alternative sparse merkle tree implementation for IBC sub-tree + to be able to support proofs compatible with the current version of ICS23 + ([#279](https://github.com/anoma/namada/pull/279)) +- Fixed validator raw hash corresponding to validator address in Tendermint + ([#326](https://github.com/anoma/namada/pull/326)) +- Fix the value recorded for epoch start block height. + ([#384](https://github.com/anoma/namada/issues/384)) +- Fix the rustdoc build. ([#419](https://github.com/anoma/namada/issues/419)) +- Fix the value recorded for epoch start block height. + ([#594](https://github.com/anoma/namada/pull/594)) +- Make read_wasm return an error instead of exiting in InitChain + ([#1099](https://github.com/anoma/anoma/pull/1099)) +- Fix the `last_epoch` field in the shell to only be updated when the block is + committed. ([#1249](https://github.com/anoma/anoma/pull/1249)) + +### FEATURES + +- Added multitoken transfer and query for bridges + ([#132](https://github.com/anoma/namada/issues/132)) +- Added lazy vector and map data structures for ledger storage + ([#503](https://github.com/anoma/namada/pull/503)) + +### IMPROVEMENTS + +- Validate WASM code of validity predicates written by transactions.
+ ([#240](https://github.com/anoma/anoma/pull/240)) +- Refactored PoS VP logic ([#318](https://github.com/anoma/namada/pull/318)) +- Added a StorageRead trait for a common interface for VPs prior and posterior + state, transactions and direct storage access for protocol and RPC handlers + ([#324](https://github.com/anoma/namada/pull/324)) +- Added a StorageWrite trait for a common interface for transactions and direct + storage access for protocol ([#331](https://github.com/anoma/namada/pull/331)) +- Re-use encoding/decoding storage write/read and handle any errors + ([#334](https://github.com/anoma/namada/pull/334)) +- Added a simpler prefix iterator API that returns `std::iter::Iterator` with + the storage keys parsed and a variant that also decodes stored values with + Borsh ([#335](https://github.com/anoma/namada/pull/335)) +- Handles the case where a custom `$CARGO_TARGET_DIR` is set during WASM build + ([#337](https://github.com/anoma/anoma/pull/337)) +- Added `pre/post` methods into `trait VpEnv` that return objects implementing + `trait StorageRead` for re-use of library code written on top of `StorageRead` + inside validity predicates. ([#380](https://github.com/anoma/namada/pull/380)) +- Fix order of prefix iterator to be sorted by storage + keys and add support for a reverse order prefix iterator. + ([#409](https://github.com/anoma/namada/issues/409)) +- Re-use `storage_api::Error` type that supports wrapping custom error in `VpEnv` and `TxEnv` traits. + ([#465](https://github.com/anoma/namada/pull/465)) +- Fixed governance parameters, tally, tx whitelist and renamed treasury + ([#467](https://github.com/anoma/namada/issues/467)) +- Enable mdbook-admonish for the specs + ([#518](https://github.com/anoma/namada/issues/518)) +- Extend Merkle tree storage to support multiple Merkle trees with a uniform + interface. ([#547](https://github.com/anoma/namada/pull/547)) +- Fix a typo in an error ([#605](https://github.com/anoma/namada/issues/605)) +- Added WASM transaction and validity predicate `Ctx` with methods for host + environment functions to unify the interface of native VPs and WASM VPs under + `trait VpEnv` ([#1093](https://github.com/anoma/anoma/pull/1093)) +- Allows simple retrieval of aliases from addresses in the wallet without + the need for multiple hashmaps. This is the first step to improving the + UI if one wants to show aliases when fetching addresses from anoma wallet + ([#1138](https://github.com/anoma/anoma/pull/1138)) +- Allow specifying an absolute path for the wasm directory + ([#1148](https://github.com/anoma/anoma/issues/1148)) +- Add functionality to anomac to download wasms for a given chain + ([#1159](https://github.com/anoma/anoma/pull/1159)) +- Improved CLI experience for 'anomaw address find' + ([#1161](https://github.com/anoma/anoma/pull/1161)) +- Wallet: Increase the number of iterations used for keys encryption to the + recommended value. ([#1168](https://github.com/anoma/anoma/issues/1168)) +- Improve the error message that is displayed when anoma binaries are run without + having joined a chain ([#1176](https://github.com/anoma/anoma/pull/1176)) +- Refactored ledger startup code + ([#1231](https://github.com/anoma/anoma/pull/1231)) +- Replace Tendermint consensus evidence parameters with + application level evidence filter for outdated evidence. + ([#1248](https://github.com/anoma/anoma/pull/1248)) + +### MISCELLANEOUS + +- Updated RocksDB dependency to 0.19.0 and enabled its jemalloc feature.
+ ([#452](https://github.com/anoma/namada/pull/452)) +- Removed intent gossiper and matchmaker code + ([#493](https://github.com/anoma/namada/issues/493)) +- Use a cargo workspace for some of our wasm crates + ([#1096](https://github.com/anoma/anoma/pull/1096)) +- Added a make recipe to build WASM in debug mode with `make debug-wasm-scripts` + ([#1243](https://github.com/anoma/anoma/pull/1243)) + +### TESTING + +- Test PoS transaction for bonding, unbonding and withdrawal. Fixed an issue + found on unbonding. ([#462](https://github.com/anoma/anoma/issues/462)) +- Fix a condition in tx_bond test that causes a false negative result + ([#590](https://github.com/anoma/namada/pull/590)) +- Fixed ANOMA_E2E_KEEP_TEMP=true to work in e2e::setup::network + ([#1221](https://github.com/anoma/anoma/issues/1221)) + ## v0.7.1 Namada 0.7.1 is a patch release of the Namada software, continuing the diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 42e29950d8..c00d3f08ec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -47,4 +47,4 @@ for i in $(ls -d .changelog/*/*/); do basename "$i"; done ## Development priorities -If you’d like to follow the development or contribute with new or unimplemented features, we recommend to check [the pinned issues](https://github.com/anoma/anoma/issues) that are set to tracking issues in current focus of the ledger, intent gossiper and matchmaker team. +If you’d like to follow the development or contribute with new or unimplemented features, we recommend to check [the issues](https://github.com/anoma/namada/issues) that are in current focus of the ledger team. diff --git a/Cargo.lock b/Cargo.lock index 9248c9915c..fa8fada1e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9,21 +9,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57a7559404a7f3573127aab53c08ce37a6c6a315c374a31070f3c91cd1b4a7fe" dependencies = [ "bitflags", - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-sink", "log 0.4.17", "memchr", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", - "tokio-util 0.7.3", + "tokio-util 0.7.4", ] [[package]] name = "actix-http" -version = "3.0.4" +version = "3.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5885cb81a0d4d0d322864bea1bb6c2a8144626b4fdc625d4c51eba197e7797a" +checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724" dependencies = [ "actix-codec", "actix-rt", @@ -32,7 +32,7 @@ dependencies = [ "ahash", "base64 0.13.0", "bitflags", - "bytes 1.1.0", + "bytes 1.2.1", "bytestring", "derive_more", "encoding_rs", @@ -45,13 +45,13 @@ dependencies = [ "itoa", "language-tags 0.3.2", "local-channel", - "log 0.4.17", "mime 0.3.16", - "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", + "percent-encoding 2.2.0", + "pin-project-lite", "rand 0.8.5", - "sha-1 0.10.0", - "smallvec 1.8.0", + "sha1", + "smallvec 1.10.0", + "tracing 0.1.37", "zstd", ] @@ -73,7 +73,7 @@ checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" dependencies = [ "futures-core", "paste", - "pin-project-lite 0.2.9", + "pin-project-lite", ] [[package]] @@ -90,19 +90,19 @@ dependencies = [ "http", "log 0.4.17", "openssl", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio-openssl", - "tokio-util 0.7.3", + "tokio-util 0.7.4", ] [[package]] name = "actix-utils" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e491cbaac2e7fc788dfff99ff48ef317e23b3cf63dbaf7aaab6418f40f92aa94" +checksum = 
"88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" dependencies = [ "local-waker", - "pin-project-lite 0.2.9", + "pin-project-lite", ] [[package]] @@ -111,7 +111,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" dependencies = [ - "gimli 0.26.1", + "gimli 0.26.2", ] [[package]] @@ -120,57 +120,22 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "aead" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" -dependencies = [ - "generic-array 0.14.5", -] - -[[package]] -name = "aes" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" -dependencies = [ - "cfg-if 1.0.0", - "cipher", - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", -] - -[[package]] -name = "aes-gcm" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" -dependencies = [ - "aead", - "aes", - "cipher", - "ctr", - "ghash", - "subtle 2.4.1", -] - [[package]] name = "ahash" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.6", + "getrandom 0.2.7", "once_cell", "version_check 0.9.4", ] [[package]] name = "aho-corasick" -version = "0.7.18" +version = "0.7.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" dependencies = [ "memchr", ] @@ -195,9 +160,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.57" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "ark-bls12-381" @@ -341,15 +306,9 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "ascii" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbf56136a5198c7b01a49e3afcbef6cf84597273d298f54432926024107b0109" - -[[package]] -name = "asn1_der" -version = "0.7.5" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22d1f4b888c298a027c99dc9048015fac177587de20fc30232a057dfbe24a21" +checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" [[package]] name = "assert_cmd" @@ -373,9 +332,9 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "async-channel" -version = "1.6.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319" +checksum = "e14485364214912d3b19cc3435dde4df66065127f05fa0d75c712f36f12c2f28" dependencies = [ "concurrent-queue", "event-listener", @@ -398,26 +357,25 @@ dependencies = [ [[package]] name = "async-global-executor" -version = "2.0.4" 
+version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c290043c9a95b05d45e952fb6383c67bcb61471f60cfa21e890dba6654234f43" +checksum = "0da5b41ee986eed3f524c380e6d64965aea573882a8907682ad100f7859305ca" dependencies = [ "async-channel", "async-executor", "async-io", - "async-mutex", + "async-lock", "blocking", "futures-lite", - "num_cpus", "once_cell", ] [[package]] name = "async-io" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5e18f61464ae81cde0a23e713ae8fd299580c54d697a35820cfd0625b8b0e07" +version = "1.9.0" +source = "git+https://github.com/heliaxdev/async-io.git?rev=9285dad39c9a37ecd0dbd498c5ce5b0e65b02489#9285dad39c9a37ecd0dbd498c5ce5b0e65b02489" dependencies = [ + "autocfg 1.1.0", "concurrent-queue", "futures-lite", "libc", @@ -425,8 +383,9 @@ dependencies = [ "once_cell", "parking", "polling", + "rustversion", "slab", - "socket2 0.4.4", + "socket2", "waker-fn", "winapi 0.3.9", ] @@ -440,28 +399,20 @@ dependencies = [ "event-listener", ] -[[package]] -name = "async-mutex" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e" -dependencies = [ - "event-listener", -] - [[package]] name = "async-process" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf2c06e30a24e8c78a3987d07f0930edf76ef35e027e7bdb063fccafdad1f60c" +version = "1.5.0" +source = "git+https://github.com/heliaxdev/async-process.git?rev=e42c527e87d937da9e01aaeb563c0b948580dc89#e42c527e87d937da9e01aaeb563c0b948580dc89" dependencies = [ "async-io", + "autocfg 1.1.0", "blocking", "cfg-if 1.0.0", "event-listener", "futures-lite", "libc", "once_cell", + "rustversion", "signal-hook", "winapi 0.3.9", ] @@ -477,7 +428,7 @@ dependencies = [ "async-io", "async-lock", "async-process", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.12", "futures-channel", "futures-core", "futures-io", @@ -488,26 +439,12 @@ dependencies = [ "memchr", "num_cpus", "once_cell", - "pin-project-lite 0.2.9", + "pin-project-lite", "pin-utils", "slab", "wasm-bindgen-futures", ] -[[package]] -name = "async-std-resolver" -version = "0.20.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbf3e776afdf3a2477ef4854b85ba0dff3bd85792f685fb3c68948b4d304e4f0" -dependencies = [ - "async-std", - "async-trait", - "futures-io", - "futures-util", - "pin-utils", - "trust-dns-resolver", -] - [[package]] name = "async-stream" version = "0.3.3" @@ -531,15 +468,15 @@ dependencies = [ [[package]] name = "async-task" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30696a84d817107fc028e049980e09d5e140e8da8f1caeb17e8e950658a3cea9" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.56" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" +checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" dependencies = [ "proc-macro2", "quote", @@ -555,35 +492,13 @@ dependencies = [ "futures-io", "futures-util", "log 0.4.17", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", "tokio-rustls", "tungstenite 0.12.0", "webpki-roots", ] -[[package]] -name = "asynchronous-codec" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0de5164e5edbf51c45fb8c2d9664ae1c095cce1b265ecf7569093c0d66ef690" -dependencies = [ - "bytes 1.1.0", - "futures-sink", - "futures-util", - "memchr", - "pin-project-lite 0.2.9", -] - -[[package]] -name = "atomic" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b88d82667eca772c4aa12f0f1348b3ae643424c8876448f3f7bd5787032e234c" -dependencies = [ - "autocfg 1.1.0", -] - [[package]] name = "atomic-waker" version = "1.0.0" @@ -592,12 +507,12 @@ checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a" [[package]] name = "atty" -version = "0.2.11" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ + "hermit-abi", "libc", - "termion", "winapi 0.3.9", ] @@ -630,7 +545,7 @@ dependencies = [ "actix-utils", "ahash", "base64 0.13.0", - "bytes 1.1.0", + "bytes 1.2.1", "cfg-if 1.0.0", "derive_more", "futures-core", @@ -641,10 +556,10 @@ dependencies = [ "log 0.4.17", "mime 0.3.16", "openssl", - "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", + "percent-encoding 2.2.0", + "pin-project-lite", "rand 0.8.5", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "serde_urlencoded", "tokio", @@ -652,16 +567,16 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61" +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" dependencies = [ "addr2line", "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object", + "object 0.29.0", "rustc-demangle", ] @@ -684,12 +599,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - [[package]] name = "base64" version = "0.13.0" @@ -702,25 +611,34 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf9ff0bbfd639f15c74af777d81383cf53efb7c93613f6cab67c6c11e05bbf8b" +[[package]] +name = "bimap" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc0455254eb5c6964c4545d8bac815e1a1be4f3afe0ae695ea539c12d728d44b" +dependencies = [ + "serde 1.0.145", +] + [[package]] name = "bincode" version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" dependencies = [ - "serde 1.0.137", + "serde 1.0.145", ] [[package]] name = "bindgen" -version = "0.59.2" +version = "0.60.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +checksum = "062dddbc1ba4aca46de6338e2bf87771414c335f7b2f2036e8f3e9befebf88e6" dependencies = [ "bitflags", "cexpr", "clang-sys", - "lazy_static 1.4.0", + "lazy_static", "lazycell", "peeking_take_while", "proc-macro2", @@ -732,9 +650,9 @@ dependencies = [ [[package]] name = "bit-set" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" +checksum = 
"0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] @@ -763,24 +681,13 @@ dependencies = [ "wyz", ] -[[package]] -name = "blake2" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" -dependencies = [ - "crypto-mac 0.8.0", - "digest 0.9.0", - "opaque-debug 0.3.0", -] - [[package]] name = "blake2" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -815,7 +722,7 @@ dependencies = [ "cc", "cfg-if 1.0.0", "constant_time_eq", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -837,16 +744,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ "block-padding 0.2.1", - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -919,19 +826,13 @@ dependencies = [ "syn", ] -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - [[package]] name = "bstr" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "memchr", "regex-automata", ] @@ -948,9 +849,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.10.0" +version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byte-slice-cast" @@ -975,9 +876,9 @@ dependencies = [ [[package]] name = "bytecheck" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a31f923c2db9513e4298b72df143e6e655a759b3d6a0966df18f81223fff54f" +checksum = "d11cac2c12b5adc6570dad2ee1b87eff4955dac476fe12d81e5fdd352e52406f" dependencies = [ "bytecheck_derive", "ptr_meta", @@ -985,9 +886,9 @@ dependencies = [ [[package]] name = "bytecheck_derive" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb17c862a905d912174daa27ae002326fff56dc8b8ada50a0a5f0976cb174f0" +checksum = "13e576ebe98e605500b3c8041bb888e966653577172df6dd97398714eb30b9bf" dependencies = [ "proc-macro2", "quote", @@ -1012,15 +913,9 @@ dependencies = [ [[package]] name = "bytes" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" - -[[package]] -name = "bytes" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" +checksum = 
"ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "bytestring" @@ -1028,7 +923,7 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b6a75fd3048808ef06af5cd79712be8111960adaf89d90250974b38fc3928a" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", ] [[package]] @@ -1048,24 +943,11 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" -[[package]] -name = "cargo-watch" -version = "7.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a4cf2908216028d1d97f49ed180367f009fdb3cd07550d0ef2db42bd6c739f" -dependencies = [ - "clap 2.34.0", - "log 0.4.17", - "shell-escape", - "stderrlog", - "watchexec", -] - [[package]] name = "cc" -version = "1.0.72" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" dependencies = [ "jobserver", ] @@ -1093,38 +975,13 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fee7ad89dc1128635074c268ee661f90c3f7e83d9fd12910608c36b47d6c3412" -dependencies = [ - "cfg-if 1.0.0", - "cipher", - "cpufeatures 0.1.5", - "zeroize", -] - -[[package]] -name = "chacha20" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b72a433d0cf2aef113ba70f62634c56fddb0f244e6377185c56a7cadbd8f91" +checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" dependencies = [ "cfg-if 1.0.0", "cipher", - "cpufeatures 0.2.2", -] - -[[package]] -name = "chacha20poly1305" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1580317203210c517b6d44794abfbe600698276db18127e37ad3e69bf5e848e5" -dependencies = [ - "aead", - "chacha20 0.7.1", - "cipher", - "poly1305", - "zeroize", + "cpufeatures", ] [[package]] @@ -1134,11 +991,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" dependencies = [ "iana-time-zone", - "js-sys", "num-integer", "num-traits 0.2.15", - "time 0.1.44", - "wasm-bindgen", "winapi 0.3.9", ] @@ -1154,7 +1008,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] @@ -1168,31 +1022,15 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.3.3" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a050e2153c5be08febd6734e29298e844fdb0fa21aeddd63b4eb7baa106c69b" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" dependencies = [ "glob", "libc", "libloading", ] -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "ansi_term", - "atty", - "bitflags", - "strsim 0.8.0", - "term_size", - "textwrap 0.11.0", - "unicode-width", - "vec_map", -] - [[package]] name = "clap" version = 
"3.0.0-beta.2" @@ -1201,31 +1039,31 @@ dependencies = [ "atty", "bitflags", "indexmap", - "lazy_static 1.4.0", + "lazy_static", "os_str_bytes", - "strsim 0.10.0", + "strsim", "termcolor", - "textwrap 0.12.1", + "textwrap", "unicode-width", "vec_map", ] [[package]] name = "clarity" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e043fca6ce2fabc4566fe447d2185a724529a383f3e9938279a53ea75532a02" +checksum = "880114aafee14fa3a183582a82407474d53f4950b1695658e95bbb5d049bb253" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "num-bigint 0.4.3", "num-traits 0.2.15", "num256", "secp256k1", - "serde 1.0.137", + "serde 1.0.145", "serde-rlp", "serde_bytes", "serde_derive", - "sha3 0.10.2", + "sha3 0.10.6", ] [[package]] @@ -1242,6 +1080,16 @@ name = "clru" version = "0.5.0" source = "git+https://github.com/marmeladema/clru-rs.git?rev=71ca566#71ca566915f21f3c308091ca7756a91b0f8b5afc" +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = "color-eyre" version = "0.5.11" @@ -1265,7 +1113,7 @@ checksum = "b6eee477a4a8a72f4addd4de416eb56d54bc307b284d6601bafdee1f4ea462d1" dependencies = [ "once_cell", "owo-colors", - "tracing-core 0.1.27", + "tracing-core 0.1.30", "tracing-error", ] @@ -1281,9 +1129,9 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "1.2.2" +version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3" +checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c" dependencies = [ "cache-padded", ] @@ -1294,10 +1142,10 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b1b9d958c2b1368a663f05538fc1b5975adce1e19f435acceae987aceeeb369" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "nom 5.1.2", "rust-ini", - "serde 1.0.137", + "serde 1.0.145", "serde-hjson", "serde_json", "toml", @@ -1343,18 +1191,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66c99696f6c9dd7f35d486b9d04d7e6e202aa3e8c40d553f2fdf5e7e0c6a71ef" -dependencies = [ - "libc", -] - -[[package]] -name = "cpufeatures" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] @@ -1381,7 +1220,7 @@ dependencies = [ "gimli 0.25.0", "log 0.4.17", "regalloc", - "smallvec 1.8.0", + "smallvec 1.10.0", "target-lexicon", ] @@ -1415,7 +1254,7 @@ checksum = "279afcc0d3e651b773f94837c3d581177b348c8d69e928104b2e9fccb226f921" dependencies = [ "cranelift-codegen", "log 0.4.17", - "smallvec 1.8.0", + "smallvec 1.10.0", "target-lexicon", ] @@ -1430,35 +1269,34 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" 
dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.12", ] [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.12", ] [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" dependencies = [ "autocfg 1.1.0", "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", - "lazy_static 1.4.0", + "crossbeam-utils 0.8.12", "memoffset", "scopeguard", ] @@ -1471,17 +1309,16 @@ checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ "autocfg 1.1.0", "cfg-if 0.1.10", - "lazy_static 1.4.0", + "lazy_static", ] [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if 1.0.0", - "lazy_static 1.4.0", ] [[package]] @@ -1492,32 +1329,22 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-common" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4434400df11d95d556bac068ddfedd482915eb18fe8bea89bc80b6e4b1c179e5" -dependencies = [ - "generic-array 0.12.4", - "subtle 1.0.0", -] - [[package]] name = "crypto-mac" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array 0.14.5", - "subtle 2.4.1", + "generic-array 0.14.6", + "subtle", ] [[package]] @@ -1537,40 +1364,20 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.22" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" dependencies = [ "quote", "syn", ] -[[package]] -name = "ctr" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" -dependencies = [ - "cipher", -] - [[package]] name = "cty" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" -[[package]] -name = "cuckoofilter" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b810a8449931679f64cd7eef1bbd0fa315801b6d5d9cdc1ace2804d6529eee18" 
-dependencies = [ - "byteorder", - "fnv", - "rand 0.7.3", -] - [[package]] name = "curve25519-dalek" version = "3.2.0" @@ -1580,7 +1387,7 @@ dependencies = [ "byteorder", "digest 0.9.0", "rand_core 0.5.1", - "subtle 2.4.1", + "subtle", "zeroize", ] @@ -1592,76 +1399,85 @@ checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle-ng", "zeroize", ] [[package]] -name = "darling" -version = "0.12.4" +name = "cxx" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f2c43f534ea4b0b049015d00269734195e6d3f0f6635cb692251aca6f9f8b3c" +checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8" dependencies = [ - "darling_core 0.12.4", - "darling_macro 0.12.4", + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", ] [[package]] -name = "darling" -version = "0.13.4" +name = "cxx-build" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86" dependencies = [ - "darling_core 0.13.4", - "darling_macro 0.13.4", + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn", ] [[package]] -name = "darling_core" -version = "0.12.4" +name = "cxxbridge-flags" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e91455b86830a1c21799d94524df0845183fa55bafd9aa137b01c7d1065fa36" +checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f" dependencies = [ - "fnv", - "ident_case", "proc-macro2", "quote", - "strsim 0.10.0", "syn", ] [[package]] -name = "darling_core" -version = "0.13.4" +name = "darling" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +checksum = "4529658bdda7fd6769b8614be250cdcfc3aeb0ee72fe66f9e41e5e5eb73eac02" dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "syn", + "darling_core", + "darling_macro", ] [[package]] -name = "darling_macro" -version = "0.12.4" +name = "darling_core" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29b5acf0dea37a7f66f7b25d2c5e93fd46f8f6968b1a5d7a3e02e97768afc95a" +checksum = "649c91bc01e8b1eac09fb91e8dbc7d517684ca6be8ebc75bb9cafc894f9fdb6f" dependencies = [ - "darling_core 0.12.4", + "fnv", + "ident_case", + "proc-macro2", "quote", "syn", ] [[package]] name = "darling_macro" -version = "0.13.4" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +checksum = "ddfc69c5bfcbd2fc09a0f38451d2daf0e372e367986a83906d1b0dbc88134fb5" dependencies = [ - "darling_core 0.13.4", + "darling_core", "quote", "syn", ] @@ -1684,54 +1500,23 @@ dependencies = [ ] [[package]] -name = "derive_builder" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d13202debe11181040ae9063d739fa32cfcaaebe2275fe387703460ae2365b30" -dependencies = [ - "derive_builder_macro", -] - 
-[[package]] -name = "derive_builder_core" -version = "0.10.2" +name = "derive_more" +version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66e616858f6187ed828df7c64a6d71720d83767a7f19740b2d1b6fe6327b36e5" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ - "darling 0.12.4", + "convert_case", "proc-macro2", "quote", + "rustc_version 0.4.0", "syn", ] [[package]] -name = "derive_builder_macro" -version = "0.10.2" +name = "diff" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58a94ace95092c5acb1e97a7e846b310cfbd499652f72297da7493f618a98d73" -dependencies = [ - "derive_builder_core", - "syn", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "convert_case", - "proc-macro2", - "quote", - "rustc_version 0.4.0", - "syn", -] - -[[package]] -name = "diff" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "difflib" @@ -1754,48 +1539,18 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", ] [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" dependencies = [ - "block-buffer 0.10.2", + "block-buffer 0.10.3", "crypto-common", - "subtle 2.4.1", -] - -[[package]] -name = "directories" -version = "4.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" -dependencies = [ - "libc", - "redox_users", - "winapi 0.3.9", -] - -[[package]] -name = "dns-parser" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4d33be9473d06f75f58220f71f7a9317aca647dc061dbd3c361b0bef505fbea" -dependencies = [ - "byteorder", - "quick-error 1.2.3", + "subtle", ] [[package]] @@ -1818,7 +1573,7 @@ checksum = "add9a102807b524ec050363f09e06f1504214b0e1c7797f64261c891022dce8b" dependencies = [ "bitflags", "byteorder", - "lazy_static 1.4.0", + "lazy_static", "proc-macro-error", "proc-macro2", "quote", @@ -1842,7 +1597,7 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9c280362032ea4203659fc489832d0204ef09f247a0506f170dafcac08c369" dependencies = [ - "serde 1.0.137", + "serde 1.0.145", "signature", ] @@ -1854,8 +1609,8 @@ checksum = "758e2a0cd8a6cdf483e1d369e7d081647e00b88d8953e34d8f2cbba05ae28368" dependencies = [ "curve25519-dalek-ng", "hex", - "rand_core 0.6.3", - "serde 1.0.137", + "rand_core 0.6.4", + "serde 1.0.145", "sha2 0.9.9", "thiserror", "zeroize", @@ -1871,7 +1626,7 @@ 
dependencies = [ "ed25519", "merlin", "rand 0.7.3", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", "sha2 0.9.9", "zeroize", @@ -1879,22 +1634,9 @@ dependencies = [ [[package]] name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "embed-resource" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc24ff8d764818e9ab17963b0593c535f077a513f565e75e4352d758bc4d8c0" -dependencies = [ - "cc", - "rustc_version 0.4.0", - "toml", - "vswhom", - "winreg 0.10.1", -] +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "encoding_rs" @@ -1905,18 +1647,6 @@ dependencies = [ "cfg-if 1.0.0", ] -[[package]] -name = "enum-as-inner" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "570d109b813e904becc80d8d5da38376818a143348413f7149f1340fe04754d4" -dependencies = [ - "heck 0.4.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "enum-iterator" version = "0.7.0" @@ -1939,34 +1669,25 @@ dependencies = [ [[package]] name = "enumset" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4799cdb24d48f1f8a7a98d06b7fde65a85a2d1e42b25a889f5406aa1fbefe074" +checksum = "19be8061a06ab6f3a6cf21106c873578bf01bd42ad15e0311a9c76161cb1c753" dependencies = [ "enumset_derive", ] [[package]] name = "enumset_derive" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea83a3fbdc1d999ccfbcbee717eab36f8edf2d71693a23ce0d7cca19e085304c" +checksum = "03e7b551eba279bf0fa88b83a46330168c1560a52a94f5126f892f0b364ab3e0" dependencies = [ - "darling 0.13.4", + "darling", "proc-macro2", "quote", "syn", ] -[[package]] -name = "env_logger" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" -dependencies = [ - "log 0.4.17", -] - [[package]] name = "error" version = "0.1.9" @@ -1985,7 +1706,7 @@ checksum = "f5584ba17d7ab26a8a7284f13e5bd196294dd2f2d79773cff29b9e9edef601a6" dependencies = [ "log 0.4.17", "once_cell", - "serde 1.0.137", + "serde 1.0.145", "serde_json", ] @@ -1999,9 +1720,9 @@ dependencies = [ "hex", "once_cell", "regex", - "serde 1.0.137", + "serde 1.0.145", "serde_json", - "sha3 0.10.2", + "sha3 0.10.6", "thiserror", "uint", ] @@ -2035,18 +1756,17 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.2" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "expectrl" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2795e11f4ee3124984d454f25ac899515a5fa6d956562ef2b147fef6050b02f8" +version = "0.6.0" +source = "git+https://github.com/james-chf/expectrl.git?branch=james/raw-logger#c7b409004bb6d088df125bd9adda799ef6e316dc" dependencies = [ "conpty", - "nix 0.23.1", + "nix 0.25.0", "ptyprocess", "regex", ] @@ -2075,9 +1795,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.7.0" +version = "1.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] @@ -2085,7 +1805,7 @@ dependencies = [ [[package]] name = "ferveo" version = "0.1.1" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" +source = "git+https://github.com/anoma/ferveo#1022ab2c7ccc689abcc05e5a08df6fb0c2a3fc65" dependencies = [ "anyhow", "ark-bls12-381", @@ -2096,39 +1816,39 @@ dependencies = [ "ark-serialize", "ark-std", "bincode", - "blake2 0.10.4", + "blake2", "blake2b_simd", "borsh", - "digest 0.10.3", + "digest 0.10.5", "ed25519-dalek", "either", "ferveo-common", "group-threshold-cryptography", "hex", - "itertools 0.10.3", + "itertools", "measure_time", "miracl_core", "num 0.4.0", "rand 0.7.3", "rand 0.8.5", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", "serde_json", "subproductdomain", - "subtle 2.4.1", + "subtle", "zeroize", ] [[package]] name = "ferveo-common" version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" +source = "git+https://github.com/anoma/ferveo#1022ab2c7ccc689abcc05e5a08df6fb0c2a3fc65" dependencies = [ "anyhow", "ark-ec", "ark-serialize", "ark-std", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", ] @@ -2141,14 +1861,14 @@ dependencies = [ "cc", "libc", "mktemp", - "nix 0.24.1", + "nix 0.24.2", ] [[package]] name = "file-serve" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a09c8127b1a49f66ac56a7c9efe420e2ab23e00266ea4144cc2b905076a7f1" +checksum = "e43addbb09a5dcb5609cb44a01a79e67716fe40b50c109f50112ef201a8c7c59" dependencies = [ "log 0.4.17", "mime_guess", @@ -2157,14 +1877,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c" +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - "winapi 0.3.9", + "redox_syscall 0.2.16", + "windows-sys 0.36.1", ] [[package]] @@ -2181,15 +1901,9 @@ dependencies = [ [[package]] name = "fixedbitset" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" - -[[package]] -name = "fixedbitset" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" @@ -2198,7 +1912,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ "crc32fast", - "libz-sys", "miniz_oxide", ] @@ -2235,12 +1948,11 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" dependencies = [ - "matches", - 
"percent-encoding 2.1.0", + "percent-encoding 2.2.0", ] [[package]] @@ -2249,25 +1961,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" -[[package]] -name = "fsevent" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" -dependencies = [ - "bitflags", - "fsevent-sys", -] - -[[package]] -name = "fsevent-sys" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" -dependencies = [ - "libc", -] - [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -2304,9 +1997,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" dependencies = [ "futures-channel", "futures-core", @@ -2319,9 +2012,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" dependencies = [ "futures-core", "futures-sink", @@ -2329,27 +2022,26 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" [[package]] name = "futures-executor" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" +checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" dependencies = [ "futures-core", "futures-task", "futures-util", - "num_cpus", ] [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" [[package]] name = "futures-lite" @@ -2362,55 +2054,38 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.9", + "pin-project-lite", "waker-fn", ] [[package]] name = "futures-macro" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" +checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "futures-rustls" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1387e07917c711fb4ee4f48ea0adb04a3c9739e53ef85bf43ae1edc2937a8b" -dependencies = [ - "futures-io", - "rustls", - "webpki", -] - [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" [[package]] name = "futures-task" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" - -[[package]] -name = "futures-timer" -version = "3.0.2" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" [[package]] name = "futures-util" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" dependencies = [ "futures-channel", "futures-core", @@ -2419,7 +2094,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.9", + "pin-project-lite", "pin-utils", "slab", ] @@ -2435,9 +2110,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check 0.9.4", @@ -2456,23 +2131,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad" +checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" dependencies = [ "cfg-if 1.0.0", "libc", - "wasi 0.10.0+wasi-snapshot-preview1", -] - -[[package]] -name = "ghash" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1583cc1656d7839fd3732b80cf4f38850336cdb9b8ded1cd399ca62958de3c99" -dependencies = [ - "opaque-debug 0.3.0", - "polyval", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] @@ -2488,9 +2153,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" [[package]] name = "git2" @@ -2504,7 +2169,7 @@ dependencies = [ "log 0.4.17", "openssl-probe", "openssl-sys", - "url 2.2.2", + "url 2.3.1", ] [[package]] @@ -2513,19 +2178,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" -[[package]] -name = "globset" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" -dependencies = [ - "aho-corasick", - "bstr", - "fnv", - "log 0.4.17", - "regex", -] - [[package]] name = "gloo-timers" version = "0.2.4" @@ -2541,7 +2193,7 @@ dependencies = [ [[package]] name = "group-threshold-cryptography" version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" +source = 
"git+https://github.com/anoma/ferveo#1022ab2c7ccc689abcc05e5a08df6fb0c2a3fc65" dependencies = [ "anyhow", "ark-bls12-381", @@ -2551,12 +2203,12 @@ dependencies = [ "ark-serialize", "ark-std", "blake2b_simd", - "chacha20 0.8.1", + "chacha20", "hex", - "itertools 0.10.3", + "itertools", "miracl_core", "rand 0.8.5", - "rand_core 0.6.3", + "rand_core 0.6.4", "rayon", "subproductdomain", "thiserror", @@ -2584,11 +2236,11 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" +checksum = "5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "fnv", "futures-core", "futures-sink", @@ -2597,8 +2249,8 @@ dependencies = [ "indexmap", "slab", "tokio", - "tokio-util 0.7.3", - "tracing 0.1.35", + "tokio-util 0.7.4", + "tracing 0.1.37", ] [[package]] @@ -2612,41 +2264,37 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ "ahash", ] [[package]] name = "hdrhistogram" -version = "7.5.0" +version = "7.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31672b7011be2c4f7456c4ddbcb40e7e9a4a9fad8efe49a6ebaf5f307d0109c0" +checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" dependencies = [ - "base64 0.13.0", "byteorder", - "crossbeam-channel", - "flate2", - "nom 7.1.1", "num-traits 0.2.15", ] [[package]] name = "headers" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cff78e5788be1e0ab65b04d306b2ed5092c815ec97ec70f4ebd5aee158aa55d" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ "base64 0.13.0", "bitflags", - "bytes 1.1.0", + "bytes 1.2.1", "headers-core", "http", "httpdate", "mime 0.3.16", - "sha-1 0.10.0", + "sha1", ] [[package]] @@ -2688,43 +2336,16 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hex_fmt" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" - -[[package]] -name = "hmac" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dcb5e64cda4c23119ab41ba960d1e170a774c8e4b9d9e6a9bc18aabf5e59695" -dependencies = [ - "crypto-mac 0.7.0", - "digest 0.8.1", -] - [[package]] name = "hmac" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" dependencies = [ - "crypto-mac 0.8.0", + "crypto-mac", "digest 0.9.0", ] -[[package]] -name = "hmac-drbg" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e570451493f10f6581b48cdd530413b63ea9e780f544bfd3bdcaa0d89d1a7b" -dependencies = [ - "digest 0.8.1", - "generic-array 0.12.4", - "hmac 0.7.1", -] - [[package]] name = "hmac-drbg" version = "0.3.0" @@ -2732,19 +2353,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" dependencies = [ "digest 0.9.0", - "generic-array 0.14.5", - "hmac 0.8.1", -] - -[[package]] -name = "hostname" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" -dependencies = [ - "libc", - "match_cfg", - "winapi 0.3.9", + "generic-array 0.14.6", + "hmac", ] [[package]] @@ -2753,7 +2363,7 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "fnv", "itoa", ] @@ -2764,16 +2374,16 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "http", - "pin-project-lite 0.2.9", + "pin-project-lite", ] [[package]] name = "httparse" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -2802,11 +2412,11 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.19" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-channel", "futures-core", "futures-util", @@ -2816,11 +2426,11 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.9", - "socket2 0.4.4", + "pin-project-lite", + "socket2", "tokio", "tower-service", - "tracing 0.1.35", + "tracing 0.1.37", "want", ] @@ -2830,11 +2440,11 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc" dependencies = [ - "bytes 1.1.0", - "futures 0.3.21", + "bytes 1.2.1", + "futures 0.3.25", "headers", "http", - "hyper 0.14.19", + "hyper 0.14.20", "hyper-rustls", "rustls-native-certs", "tokio", @@ -2851,7 +2461,7 @@ checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" dependencies = [ "ct-logs", "futures-util", - "hyper 0.14.19", + "hyper 0.14.20", "log 0.4.17", "rustls", "rustls-native-certs", @@ -2867,8 +2477,8 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.19", - "pin-project-lite 0.2.9", + "hyper 0.14.20", + "pin-project-lite", "tokio", "tokio-io-timeout", ] @@ -2879,8 +2489,8 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.1.0", - "hyper 0.14.19", + "bytes 1.2.1", + "hyper 0.14.20", "native-tls", "tokio", "tokio-native-tls", @@ -2888,42 +2498,53 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.50" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" +checksum = 
"f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" dependencies = [ "android_system_properties", "core-foundation-sys", + "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "winapi 0.3.9", ] +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + [[package]] name = "ibc" version = "0.14.0" source = "git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d#9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "derive_more", "flex-error", "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)", "ics23", "num-traits 0.2.15", - "prost 0.9.0", - "prost-types 0.9.0", + "prost", + "prost-types", "safe-regex", - "serde 1.0.137", + "serde 1.0.145", "serde_derive", "serde_json", - "sha2 0.10.2", + "sha2 0.10.6", "subtle-encoding", "tendermint 0.23.5", "tendermint-light-client-verifier 0.23.5", "tendermint-proto 0.23.5", "tendermint-testgen 0.23.5", - "time 0.3.9", - "tracing 0.1.35", + "time 0.3.15", + "tracing 0.1.37", ] [[package]] @@ -2931,26 +2552,26 @@ name = "ibc" version = "0.14.0" source = "git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "derive_more", "flex-error", "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)", "ics23", "num-traits 0.2.15", - "prost 0.9.0", - "prost-types 0.9.0", + "prost", + "prost-types", "safe-regex", - "serde 1.0.137", + "serde 1.0.145", "serde_derive", "serde_json", - "sha2 0.10.2", + "sha2 0.10.6", "subtle-encoding", "tendermint 0.23.6", "tendermint-light-client-verifier 0.23.6", "tendermint-proto 0.23.6", "tendermint-testgen 0.23.6", - "time 0.3.9", - "tracing 0.1.35", + "time 0.3.15", + "tracing 0.1.37", ] [[package]] @@ -2959,10 +2580,10 @@ version = "0.17.1" source = "git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d#9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d" dependencies = [ "base64 0.13.0", - "bytes 1.1.0", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "bytes 1.2.1", + "prost", + "prost-types", + "serde 1.0.145", "tendermint-proto 0.23.5", ] @@ -2972,10 +2593,10 @@ version = "0.17.1" source = "git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" dependencies = [ "base64 0.13.0", - "bytes 1.1.0", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "bytes 1.2.1", + "prost", + "prost-types", + "serde 1.0.145", "tendermint-proto 0.23.6", ] @@ -2986,9 +2607,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d454cc0a22bd556cc3d3c69f9d75a392a36244634840697a4b9eb81bc5c8ae0" dependencies = [ "anyhow", - "bytes 1.1.0", + "bytes 1.2.1", "hex", - "prost 0.9.0", + "prost", "ripemd160", "sha2 0.9.9", "sha3 0.9.1", @@ -3014,52 +2635,14 @@ dependencies = [ [[package]] name = "idna" -version = "0.2.3" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" dependencies = [ - "matches", "unicode-bidi", 
"unicode-normalization", ] -[[package]] -name = "if-addrs" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2273e421f7c4f0fc99e1934fe4776f59d8df2972f4199d703fc0da9f2a9f73de" -dependencies = [ - "if-addrs-sys", - "libc", - "winapi 0.3.9", -] - -[[package]] -name = "if-addrs-sys" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de74b9dd780476e837e5eb5ab7c88b49ed304126e412030a0adba99c8efe79ea" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "if-watch" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae8ab7f67bad3240049cb24fb9cb0b4c2c6af4c245840917fbbdededeee91179" -dependencies = [ - "async-io", - "futures 0.3.21", - "futures-lite", - "if-addrs", - "ipnet", - "libc", - "log 0.4.17", - "winapi 0.3.9", -] - [[package]] name = "impl-codec" version = "0.6.0" @@ -3084,7 +2667,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" dependencies = [ - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -3106,33 +2689,13 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "1.8.2" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg 1.1.0", - "hashbrown 0.11.2", - "serde 1.0.137", -] - -[[package]] -name = "inotify" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f" -dependencies = [ - "bitflags", - "inotify-sys", - "libc", -] - -[[package]] -name = "inotify-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" -dependencies = [ - "libc", + "hashbrown 0.12.3", + "serde 1.0.145", ] [[package]] @@ -3141,7 +2704,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f97967975f448f1a7ddb12b0bc41069d09ed6a1c161a92687e057325db35d413" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", ] [[package]] @@ -3158,9 +2721,9 @@ dependencies = [ [[package]] name = "integer-encoding" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e85a1509a128c855368e135cffcde7eac17d8e1083f41e2b98c58bc1a5074be" +checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" [[package]] name = "iovec" @@ -3171,18 +2734,6 @@ dependencies = [ "libc", ] -[[package]] -name = "ipconfig" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e2f18aece9709094573a9f24f483c4f65caa4298e2f7ae1b71cc65d853fad7" -dependencies = [ - "socket2 0.3.19", - "widestring", - "winapi 0.3.9", - "winreg 0.6.2", -] - [[package]] name = "ipnet" version = "2.5.0" @@ -3191,42 +2742,33 @@ checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itertools" -version = "0.9.0" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", -] - -[[package]] 
-name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" [[package]] name = "jobserver" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.57" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397" +checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" dependencies = [ "wasm-bindgen", ] @@ -3268,12 +2810,6 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" -[[package]] -name = "lazy_static" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" - [[package]] name = "lazy_static" version = "1.4.0" @@ -3307,9 +2843,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.134" +version = "0.2.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" +checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" [[package]] name = "libgit2-sys" @@ -3335,422 +2871,11 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "libp2p" -version = "0.38.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "atomic", - "bytes 1.1.0", - "futures 0.3.21", - "lazy_static 1.4.0", - "libp2p-core", - "libp2p-deflate", - "libp2p-dns", - "libp2p-floodsub", - "libp2p-gossipsub", - "libp2p-identify", - "libp2p-kad", - "libp2p-mdns", - "libp2p-mplex", - "libp2p-noise", - "libp2p-ping", - "libp2p-plaintext", - "libp2p-pnet", - "libp2p-relay", - "libp2p-request-response", - "libp2p-swarm", - "libp2p-swarm-derive", - "libp2p-tcp", - "libp2p-uds", - "libp2p-wasm-ext", - "libp2p-websocket", - "libp2p-yamux", - "parity-multiaddr", - "parking_lot 0.11.2", - "pin-project 1.0.10", - "smallvec 1.8.0", - "wasm-timer", -] - -[[package]] -name = "libp2p-core" -version = "0.28.3" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "asn1_der", - "bs58", - "ed25519-dalek", - "either", - "fnv", - "futures 0.3.21", - "futures-timer", - "lazy_static 1.4.0", - "libsecp256k1 0.3.5", - "log 0.4.17", - "multihash", - "multistream-select", - "parity-multiaddr", - "parking_lot 0.11.2", - "pin-project 1.0.10", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.7.3", - "ring", - "rw-stream-sink", - "sha2 0.9.9", - "smallvec 1.8.0", - "thiserror", - 
"unsigned-varint 0.7.1", - "void", - "zeroize", -] - -[[package]] -name = "libp2p-deflate" -version = "0.28.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "flate2", - "futures 0.3.21", - "libp2p-core", -] - -[[package]] -name = "libp2p-dns" -version = "0.28.1" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "async-std-resolver", - "futures 0.3.21", - "libp2p-core", - "log 0.4.17", - "smallvec 1.8.0", - "trust-dns-resolver", -] - -[[package]] -name = "libp2p-floodsub" -version = "0.29.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "cuckoofilter", - "fnv", - "futures 0.3.21", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.7.3", - "smallvec 1.8.0", -] - -[[package]] -name = "libp2p-gossipsub" -version = "0.31.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "asynchronous-codec", - "base64 0.13.0", - "byteorder", - "bytes 1.1.0", - "fnv", - "futures 0.3.21", - "hex_fmt", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.7.3", - "regex", - "sha2 0.9.9", - "smallvec 1.8.0", - "unsigned-varint 0.7.1", - "wasm-timer", -] - -[[package]] -name = "libp2p-identify" -version = "0.29.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "futures 0.3.21", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "smallvec 1.8.0", - "wasm-timer", -] - -[[package]] -name = "libp2p-kad" -version = "0.30.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "arrayvec 0.5.2", - "asynchronous-codec", - "bytes 1.1.0", - "either", - "fnv", - "futures 0.3.21", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.7.3", - "sha2 0.9.9", - "smallvec 1.8.0", - "uint", - "unsigned-varint 0.7.1", - "void", - "wasm-timer", -] - -[[package]] -name = "libp2p-mdns" -version = "0.30.2" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "async-io", - "data-encoding", - "dns-parser", - "futures 0.3.21", - "if-watch", - "lazy_static 1.4.0", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "rand 0.8.5", - "smallvec 1.8.0", - "socket2 0.4.4", - "void", -] - -[[package]] -name = "libp2p-mplex" -version = "0.28.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "asynchronous-codec", - "bytes 1.1.0", - "futures 0.3.21", - "libp2p-core", - "log 0.4.17", - "nohash-hasher", - "parking_lot 0.11.2", - "rand 0.7.3", - "smallvec 1.8.0", - "unsigned-varint 0.7.1", -] - -[[package]] -name = "libp2p-noise" -version = "0.31.0" -source = 
"git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "bytes 1.1.0", - "curve25519-dalek", - "futures 0.3.21", - "lazy_static 1.4.0", - "libp2p-core", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.8.5", - "sha2 0.9.9", - "snow", - "static_assertions", - "x25519-dalek", - "zeroize", -] - -[[package]] -name = "libp2p-ping" -version = "0.29.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "futures 0.3.21", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "rand 0.7.3", - "void", - "wasm-timer", -] - -[[package]] -name = "libp2p-plaintext" -version = "0.28.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "asynchronous-codec", - "bytes 1.1.0", - "futures 0.3.21", - "libp2p-core", - "log 0.4.17", - "prost 0.7.0", - "prost-build 0.7.0", - "unsigned-varint 0.7.1", - "void", -] - -[[package]] -name = "libp2p-pnet" -version = "0.21.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "futures 0.3.21", - "log 0.4.17", - "pin-project 1.0.10", - "rand 0.7.3", - "salsa20", - "sha3 0.9.1", -] - -[[package]] -name = "libp2p-relay" -version = "0.2.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "asynchronous-codec", - "bytes 1.1.0", - "futures 0.3.21", - "futures-timer", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "pin-project 1.0.10", - "prost 0.7.0", - "prost-build 0.7.0", - "rand 0.7.3", - "smallvec 1.8.0", - "unsigned-varint 0.7.1", - "void", - "wasm-timer", -] - -[[package]] -name = "libp2p-request-response" -version = "0.11.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "async-trait", - "bytes 1.1.0", - "futures 0.3.21", - "libp2p-core", - "libp2p-swarm", - "log 0.4.17", - "lru", - "minicbor", - "rand 0.7.3", - "smallvec 1.8.0", - "unsigned-varint 0.7.1", - "wasm-timer", -] - -[[package]] -name = "libp2p-swarm" -version = "0.29.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "either", - "futures 0.3.21", - "libp2p-core", - "log 0.4.17", - "rand 0.7.3", - "smallvec 1.8.0", - "void", - "wasm-timer", -] - -[[package]] -name = "libp2p-swarm-derive" -version = "0.23.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "libp2p-tcp" -version = "0.28.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "async-io", - "futures 0.3.21", - "futures-timer", - "if-watch", - "ipnet", - "libc", - "libp2p-core", - "log 0.4.17", - "socket2 0.4.4", -] - -[[package]] -name = "libp2p-uds" -version = "0.28.0" -source = 
"git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "async-std", - "futures 0.3.21", - "libp2p-core", - "log 0.4.17", -] - -[[package]] -name = "libp2p-wasm-ext" -version = "0.28.2" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "futures 0.3.21", - "js-sys", - "libp2p-core", - "parity-send-wrapper", - "wasm-bindgen", - "wasm-bindgen-futures", -] - -[[package]] -name = "libp2p-websocket" -version = "0.29.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "either", - "futures 0.3.21", - "futures-rustls", - "libp2p-core", - "log 0.4.17", - "quicksink", - "rw-stream-sink", - "soketto", - "url 2.2.2", - "webpki-roots", -] - -[[package]] -name = "libp2p-yamux" -version = "0.32.0" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "futures 0.3.21", - "libp2p-core", - "parking_lot 0.11.2", - "thiserror", - "yamux", -] - [[package]] name = "librocksdb-sys" -version = "0.6.1+6.28.2" +version = "0.8.0+7.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc587013734dadb7cf23468e531aa120788b87243648be42e2d3a072186291" +checksum = "611804e4666a25136fcc5f8cf425ab4d26c7f74ea245ffe92ea23b85b6420b5d" dependencies = [ "bindgen", "bzip2-sys", @@ -3758,25 +2883,10 @@ dependencies = [ "glob", "libc", "libz-sys", + "tikv-jemalloc-sys", "zstd-sys", ] -[[package]] -name = "libsecp256k1" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc1e2c808481a63dc6da2074752fdd4336a3c8fcc68b83db6f1fd5224ae7962" -dependencies = [ - "arrayref", - "crunchy", - "digest 0.8.1", - "hmac-drbg 0.2.0", - "rand 0.7.3", - "sha2 0.8.2", - "subtle 2.4.1", - "typenum", -] - [[package]] name = "libsecp256k1" version = "0.7.0" @@ -3785,12 +2895,12 @@ dependencies = [ "arrayref", "base64 0.13.0", "digest 0.9.0", - "hmac-drbg 0.3.0", + "hmac-drbg", "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", "rand 0.8.5", - "serde 1.0.137", + "serde 1.0.145", "sha2 0.9.9", "typenum", ] @@ -3802,7 +2912,7 @@ source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d dependencies = [ "crunchy", "digest 0.9.0", - "subtle 2.4.1", + "subtle", ] [[package]] @@ -3847,14 +2957,22 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "link-cplusplus" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" +dependencies = [ + "cc", +] + [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" dependencies = [ - "serde 1.0.137", - "serde_test", + "serde 1.0.145", ] [[package]] @@ -3886,9 +3004,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = 
"435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg 1.1.0", "scopeguard", @@ -3934,24 +3052,6 @@ dependencies = [ "syn", ] -[[package]] -name = "lru" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ea2d928b485416e8908cff2d97d621db22b27f7b3b6729e438bcf42c671ba91" -dependencies = [ - "hashbrown 0.11.2", -] - -[[package]] -name = "lru-cache" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "mach" version = "0.3.2" @@ -3970,17 +3070,11 @@ dependencies = [ "indexmap", "linked-hash-map", "regex", - "serde 1.0.137", + "serde 1.0.145", "serde_derive", "serde_yaml", ] -[[package]] -name = "match_cfg" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" - [[package]] name = "matchers" version = "0.1.0" @@ -4020,9 +3114,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.4" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5172b50c23043ff43dd53e51392f36519d9b35a8f3a410d30ece5d1aedd58ae" +checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498" dependencies = [ "libc", ] @@ -4055,15 +3149,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eee18ff0c94dec5f2da5faa939b3b40122c9c38ff6d934d0917b5313ddc7b5e4" dependencies = [ "crossbeam-channel", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.12", "integer-encoding", - "lazy_static 1.4.0", + "lazy_static", "log 0.4.17", "mio 0.7.14", - "serde 1.0.137", + "serde 1.0.145", "strum", "tungstenite 0.16.0", - "url 2.2.2", + "url 2.3.1", ] [[package]] @@ -4091,26 +3185,6 @@ dependencies = [ "unicase 2.6.0", ] -[[package]] -name = "minicbor" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51aa5bb0ca22415daca596a227b507f880ad1b2318a87fa9325312a5d285ca0d" -dependencies = [ - "minicbor-derive", -] - -[[package]] -name = "minicbor-derive" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54999f917cd092b13904737e26631aa2b2b88d625db68e4bab461dcd8006c788" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "minimal-lexical" version = "0.2.1" @@ -4119,9 +3193,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -4160,26 +3234,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log 0.4.17", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", -] - -[[package]] -name = "mio-extras" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" -dependencies = [ - "lazycell", - "log 0.4.17", - "mio 0.6.23", - "slab", + "windows-sys 0.36.1", ] [[package]] @@ -4224,33 +3286,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" -[[package]] -name = "multihash" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dac63698b887d2d929306ea48b63760431ff8a24fac40ddb22f9c7f49fb7cab" -dependencies = [ - "digest 0.9.0", - "generic-array 0.14.5", - "multihash-derive", - "sha2 0.9.9", - "unsigned-varint 0.5.1", -] - -[[package]] -name = "multihash-derive" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "424f6e86263cd5294cbd7f1e95746b95aca0e0d66bff31e5a40d6baa87b4aa99" -dependencies = [ - "proc-macro-crate 1.1.3", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "multimap" version = "0.8.3" @@ -4275,22 +3310,9 @@ dependencies = [ "twoway", ] -[[package]] -name = "multistream-select" -version = "0.10.3" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "bytes 1.1.0", - "futures 0.3.21", - "log 0.4.17", - "pin-project 1.0.10", - "smallvec 1.8.0", - "unsigned-varint 0.7.1", -] - [[package]] name = "namada" -version = "0.7.1" +version = "0.8.1" dependencies = [ "ark-bls12-381", "ark-ec", @@ -4315,21 +3337,21 @@ dependencies = [ "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs?rev=9fcc1c8c19db6af50806ffe5b2f6c214adcbfd5d)", "ibc-proto 0.17.1 (git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2)", "ics23", - "itertools 0.10.3", - "libsecp256k1 0.7.0", + "itertools", + "libsecp256k1", "loupe", "namada_proof_of_stake", "num-rational 0.4.1", "parity-wasm", "pretty_assertions", "proptest", - "prost 0.9.0", - "prost-types 0.9.0", + "prost", + "prost-types", "pwasm-utils", "rand 0.8.5", - "rand_core 0.6.3", + "rand_core 0.6.4", "rust_decimal", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "sha2 0.9.9", "sparse-merkle-tree", @@ -4342,8 +3364,8 @@ dependencies = [ "thiserror", "tiny-keccak", "tonic-build", - "tracing 0.1.35", - "tracing-subscriber 0.3.11", + "tracing 0.1.37", + "tracing-subscriber 0.3.16", "wasmer", "wasmer-cache", "wasmer-compiler-singlepass", @@ -4356,7 +3378,7 @@ dependencies = [ [[package]] name = "namada_apps" -version = "0.7.1" +version = "0.8.1" dependencies = [ "ark-serialize", "ark-std", @@ -4365,20 +3387,20 @@ dependencies = [ "async-trait", "base64 0.13.0", "bech32", + "bimap", "bit-set", "blake2b-rs", "borsh", "byte-unit", "byteorder", - "bytes 1.1.0", - "cargo-watch", + "bytes 1.2.1", "circular-queue", - "clap 3.0.0-beta.2", + "clap", "clarity", "color-eyre", "config", + "data-encoding", "derivative", - "directories", "ed25519-consensus", "ethabi", "eyre", @@ -4386,13 +3408,11 @@ dependencies = [ "ferveo-common", "file-lock", "flate2", - "futures 0.3.21", + "futures 0.3.25", "git2", - "hex", - "itertools 0.10.3", + "itertools", "libc", "libloading", - "libp2p", "message-io", "namada", "num-derive", @@ -4401,12 +3421,11 @@ dependencies = [ "num_cpus", "once_cell", "orion", - "pathdiff", "proptest", - "prost 0.9.0", - "prost-types 0.9.0", + "prost", + "prost-types", "rand 0.8.5", - "rand_core 0.6.3", + "rand_core 0.6.4", "rayon", "regex", 
"reqwest", @@ -4414,7 +3433,7 @@ dependencies = [ "rocksdb", "rpassword", "semver 1.0.14", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", "serde_json", "serde_regex", @@ -4438,13 +3457,12 @@ dependencies = [ "tokio-test", "toml", "tonic", - "tonic-build", "tower", "tower-abci 0.1.0 (git+https://github.com/heliaxdev/tower-abci?rev=f6463388fc319b6e210503b43b3aecf6faf6b200)", "tower-abci 0.1.0 (git+https://github.com/heliaxdev/tower-abci.git?rev=fcc0014d0bda707109901abfa1b2f782d242f082)", - "tracing 0.1.35", + "tracing 0.1.37", "tracing-log", - "tracing-subscriber 0.3.11", + "tracing-subscriber 0.3.16", "warp", "web30", "websocket", @@ -4453,18 +3471,18 @@ dependencies = [ [[package]] name = "namada_encoding_spec" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", - "itertools 0.10.3", - "lazy_static 1.4.0", + "itertools", + "lazy_static", "madato", "namada", ] [[package]] name = "namada_macros" -version = "0.7.1" +version = "0.8.1" dependencies = [ "quote", "syn", @@ -4472,71 +3490,78 @@ dependencies = [ [[package]] name = "namada_proof_of_stake" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", + "derivative", "proptest", "thiserror", ] [[package]] name = "namada_tests" -version = "0.7.1" +version = "0.8.1" dependencies = [ "assert_cmd", "borsh", "chrono", "color-eyre", "concat-idents", + "data-encoding", "derivative", "escargot", "expectrl", "eyre", "file-serve", "fs_extra", - "hex", - "itertools 0.10.3", - "libp2p", + "itertools", "namada", "namada_apps", - "namada_vm_env", + "namada_tx_prelude", + "namada_vp_prelude", "pretty_assertions", "proptest", - "prost 0.9.0", + "prost", "rand 0.8.5", "serde_json", "sha2 0.9.9", "tempfile", "test-log", "toml", - "tracing 0.1.35", - "tracing-subscriber 0.3.11", + "tracing 0.1.37", + "tracing-subscriber 0.3.16", ] [[package]] name = "namada_tx_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.2", + "sha2 0.10.6", + "thiserror", ] [[package]] name = "namada_vm_env" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", - "hex", "namada", - "namada_macros", ] [[package]] name = "namada_vp_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.2", + "sha2 0.10.6", + "thiserror", ] [[package]] @@ -4545,7 +3570,7 @@ version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd7e2f3618557f980e0b17e8856252eee3c97fa12c54dff0ca290fb6266ca4a9" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "libc", "log 0.4.17", "openssl", @@ -4559,28 +3584,15 @@ dependencies = [ [[package]] name = "net2" -version = "0.2.37" +version = "0.2.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" +checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631" dependencies = [ "cfg-if 0.1.10", "libc", "winapi 0.3.9", ] -[[package]] -name = "nix" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5e06129fb611568ef4e868c14b326274959aa70ff7776e9d55323531c374945" -dependencies = [ - "bitflags", - "cc", - "cfg-if 1.0.0", - "libc", - "memoffset", -] - [[package]] name = "nix" version = "0.21.2" @@ -4596,34 +3608,29 @@ dependencies = [ [[package]] name = "nix" -version = "0.23.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6" +checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc" dependencies = [ "bitflags", - "cc", "cfg-if 1.0.0", "libc", - "memoffset", ] [[package]] name = "nix" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f17df307904acd05aa8e32e97bb20f2a0df1728bbc2d771ae8f9a90463441e9" +checksum = "e322c04a9e3440c327fca7b6c8a63e6890a32fa2ad689db972425f07e0d22abb" dependencies = [ + "autocfg 1.1.0", "bitflags", "cfg-if 1.0.0", "libc", + "memoffset", + "pin-utils", ] -[[package]] -name = "nohash-hasher" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" - [[package]] name = "nom" version = "5.1.2" @@ -4646,29 +3653,21 @@ dependencies = [ ] [[package]] -name = "notify" -version = "4.0.17" +name = "ntapi" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae03c8c853dba7bfd23e571ff0cff7bc9dceb40a4cd684cd1681824183f45257" +checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f" dependencies = [ - "bitflags", - "filetime", - "fsevent", - "fsevent-sys", - "inotify", - "libc", - "mio 0.6.23", - "mio-extras", - "walkdir", "winapi 0.3.9", ] [[package]] -name = "ntapi" -version = "0.3.7" +name = "nu-ansi-term" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ + "overload", "winapi 0.3.9", ] @@ -4693,7 +3692,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" dependencies = [ "num-bigint 0.4.3", - "num-complex 0.4.1", + "num-complex 0.4.2", "num-integer", "num-iter", "num-rational 0.4.1", @@ -4720,7 +3719,7 @@ dependencies = [ "autocfg 1.1.0", "num-integer", "num-traits 0.2.15", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -4735,9 +3734,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fbc387afefefd5e9e39493299f3069e14a140dd34dc19b4c1c1a8fddb6a790" +checksum = "7ae39348c8bc5fbd7f40c727a9925f03517afd2ab27d46702108b6a7e5414c19" dependencies = [ "num-traits 0.2.15", ] @@ -4822,11 +3821,11 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa9b5179e82f0867b23e0b9b822493821f9345561f271364f409c8e4a058367d" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "num 0.4.0", "num-derive", "num-traits 0.2.15", - "serde 1.0.137", + "serde 1.0.145", "serde_derive", ] @@ -4849,12 +3848,6 @@ dependencies = [ "libc", ] -[[package]] -name = "numtoa" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef" - [[package]] name = "object" version = "0.28.4" @@ -4867,11 +3860,20 @@ dependencies = [ "memchr", ] +[[package]] +name = "object" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" -version = "1.12.0" +version = "1.15.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "opaque-debug" @@ -4887,9 +3889,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.40" +version = "0.10.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e" +checksum = "12fc0523e3bd51a692c8850d075d74dc062ccf251c0110668cbd921917118a13" dependencies = [ "bitflags", "cfg-if 1.0.0", @@ -4919,9 +3921,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.74" +version = "0.9.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835363342df5fba8354c5b453325b110ffd54044e588c539cf2f20a8014e4cb1" +checksum = "5230151e44c0f05157effb743e8d517472843121cf9243e8b81393edb5acd9ce" dependencies = [ "autocfg 1.1.0", "cc", @@ -4937,8 +3939,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6624905ddd92e460ff0685567539ed1ac985b2dee4c92c7edcd64fce905b00c" dependencies = [ "ct-codecs", - "getrandom 0.2.6", - "subtle 2.4.1", + "getrandom 0.2.7", + "subtle", "zeroize", ] @@ -4957,41 +3959,30 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "owo-colors" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2386b4ebe91c2f7f51082d4cefa145d030e33a1842a96b12e4885cc3c01f7a55" -[[package]] -name = "parity-multiaddr" -version = "0.11.2" -source = "git+https://github.com/heliaxdev/rust-libp2p.git?rev=1abe349c231eb307d3dbe03f3ffffc6cf5e9084d#1abe349c231eb307d3dbe03f3ffffc6cf5e9084d" -dependencies = [ - "arrayref", - "bs58", - "byteorder", - "data-encoding", - "multihash", - "percent-encoding 2.1.0", - "serde 1.0.137", - "static_assertions", - "unsigned-varint 0.7.1", - "url 2.2.2", -] - [[package]] name = "parity-scale-codec" -version = "3.1.5" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" +checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" dependencies = [ "arrayvec 0.7.2", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -5000,18 +3991,12 @@ version = "3.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9299338969a3d2f491d65f140b00ddec470858402f888af98e8642fb5e8965cd" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.2.1", "proc-macro2", "quote", "syn", ] -[[package]] -name = "parity-send-wrapper" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa9777aa91b8ad9dd5aaa04a9b6bcb02c7f1deb952fca5a66034d5e63afc5c6f" - [[package]] name = "parity-wasm" version = "0.42.2" @@ -5035,25 +4020,14 @@ dependencies = [ "rustc_version 0.2.3", ] -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api 0.4.7", - "parking_lot_core 0.8.5", -] - [[package]] name = "parking_lot" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.7", - "parking_lot_core 0.9.3", + "lock_api 0.4.9", + "parking_lot_core 0.9.4", ] [[package]] @@ -5073,42 +4047,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall 0.2.13", - "smallvec 1.8.0", - "winapi 0.3.9", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" +checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - "smallvec 1.8.0", - "windows-sys", + "redox_syscall 0.2.16", + "smallvec 1.10.0", + "windows-sys 0.42.0", ] [[package]] name = "paste" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" - -[[package]] -name = "pathdiff" -version = "0.2.1" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "peeking_take_while" @@ -5151,9 +4105,9 @@ checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "pest" @@ -5164,72 +4118,36 @@ dependencies = [ "ucd-trie", ] -[[package]] -name = "petgraph" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" -dependencies = [ - "fixedbitset 0.2.0", - "indexmap", -] - [[package]] name = "petgraph" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" dependencies = [ - "fixedbitset 0.4.1", + "fixedbitset", "indexmap", ] [[package]] name = "pin-project" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9615c18d31137579e9ff063499264ddc1278e7b1982757ebc111028c4d1dc909" -dependencies = [ - "pin-project-internal 0.4.29", -] - -[[package]] -name = "pin-project" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" -dependencies = [ - "pin-project-internal 1.0.10", -] - -[[package]] -name = "pin-project-internal" -version = "0.4.29" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"044964427019eed9d49d9d5bbce6047ef18f37100ea400912a9fa4a3523ab12a" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ - "proc-macro2", - "quote", - "syn", + "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.9" @@ -5250,40 +4168,18 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "polling" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" +version = "2.3.0" +source = "git+https://github.com/heliaxdev/polling.git?rev=02a655775282879459a3460e2646b60c005bca2c#02a655775282879459a3460e2646b60c005bca2c" dependencies = [ + "autocfg 1.1.0", "cfg-if 1.0.0", "libc", "log 0.4.17", + "rustversion", "wepoll-ffi", "winapi 0.3.9", ] -[[package]] -name = "poly1305" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "048aeb476be11a4b6ca432ca569e375810de9294ae78f4774e78ea98a9246ede" -dependencies = [ - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", - "universal-hash", -] - -[[package]] -name = "polyval" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", - "universal-hash", -] - [[package]] name = "ppv-lite86" version = "0.2.16" @@ -5297,7 +4193,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c" dependencies = [ "difflib", - "itertools 0.10.3", + "itertools", "predicates-core", ] @@ -5353,10 +4249,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" dependencies = [ + "once_cell", "thiserror", "toml", ] @@ -5387,9 +4284,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.39" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ "unicode-ident", ] @@ -5402,7 +4299,7 @@ dependencies = [ "bit-set", "bitflags", "byteorder", - "lazy_static 1.4.0", + "lazy_static", "num-traits 0.2.15", "quick-error 2.0.1", "rand 0.8.5", @@ -5413,42 +4310,14 @@ dependencies = [ "tempfile", ] -[[package]] -name = "prost" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e6984d2f1a23009bd270b8bb56d0926810a3d483f59c987d77969e9d8e840b2" -dependencies = [ - "bytes 1.1.0", - "prost-derive 
0.7.0", -] - [[package]] name = "prost" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" dependencies = [ - "bytes 1.1.0", - "prost-derive 0.9.0", -] - -[[package]] -name = "prost-build" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d3ebd75ac2679c2af3a92246639f9fcc8a442ee420719cc4fe195b98dd5fa3" -dependencies = [ - "bytes 1.1.0", - "heck 0.3.3", - "itertools 0.9.0", - "log 0.4.17", - "multimap", - "petgraph 0.5.1", - "prost 0.7.0", - "prost-types 0.7.0", - "tempfile", - "which", + "bytes 1.2.1", + "prost-derive", ] [[package]] @@ -5457,33 +4326,20 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "heck 0.3.3", - "itertools 0.10.3", - "lazy_static 1.4.0", + "itertools", + "lazy_static", "log 0.4.17", "multimap", - "petgraph 0.6.2", - "prost 0.9.0", - "prost-types 0.9.0", + "petgraph", + "prost", + "prost-types", "regex", "tempfile", "which", ] -[[package]] -name = "prost-derive" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "169a15f3008ecb5160cba7d37bcd690a7601b6d30cfb87a117d45e59d52af5d4" -dependencies = [ - "anyhow", - "itertools 0.9.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "prost-derive" version = "0.9.0" @@ -5491,30 +4347,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" dependencies = [ "anyhow", - "itertools 0.10.3", + "itertools", "proc-macro2", "quote", "syn", ] -[[package]] -name = "prost-types" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b518d7cdd93dab1d1122cf07fa9a60771836c668dde9d9e2a139f957f0d9f1bb" -dependencies = [ - "bytes 1.1.0", - "prost 0.7.0", -] - [[package]] name = "prost-types" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" dependencies = [ - "bytes 1.1.0", - "prost 0.9.0", + "bytes 1.2.1", + "prost", ] [[package]] @@ -5569,22 +4415,11 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" -[[package]] -name = "quicksink" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77de3c815e5a160b1539c6592796801df2043ae35e123b46d73380cfa57af858" -dependencies = [ - "futures-core", - "futures-sink", - "pin-project-lite 0.1.12", -] - [[package]] name = "quote" -version = "1.0.18" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -5635,7 +4470,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -5665,7 +4500,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + 
"rand_core 0.6.4", ] [[package]] @@ -5694,11 +4529,11 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.6", + "getrandom 0.2.7", ] [[package]] @@ -5778,7 +4613,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -5801,7 +4636,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.12", "num_cpus", ] @@ -5822,33 +4657,13 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] -[[package]] -name = "redox_termios" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8440d8acb4fd3d277125b4bd01a6f38aee8d814b3b5fc09b3f2b825d37d3fe8f" -dependencies = [ - "redox_syscall 0.2.13", -] - -[[package]] -name = "redox_users" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" -dependencies = [ - "getrandom 0.2.6", - "redox_syscall 0.2.13", - "thiserror", -] - [[package]] name = "regalloc" version = "0.0.31" @@ -5857,14 +4672,14 @@ checksum = "571f7f397d61c4755285cd37853fe8e03271c243424a907415909379659381c5" dependencies = [ "log 0.4.17", "rustc-hash", - "smallvec 1.8.0", + "smallvec 1.10.0", ] [[package]] name = "regex" -version = "1.5.6" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -5882,9 +4697,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.26" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "region" @@ -5918,48 +4733,39 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.10" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46a1f7aa4f35e5e8b4160449f51afc758f0ce6454315a9fa7d0d113e958c41eb" +checksum = "431949c384f4e2ae07605ccaa56d1d9d2ecdb5cadd4f9577ccfab29f2e5149fc" dependencies = [ "base64 0.13.0", - "bytes 1.1.0", + "bytes 1.2.1", "encoding_rs", "futures-core", "futures-util", "h2", "http", "http-body", - "hyper 0.14.19", + "hyper 0.14.20", "hyper-tls", "ipnet", "js-sys", - "lazy_static 1.4.0", "log 0.4.17", "mime 0.3.16", "native-tls", - "percent-encoding 2.1.0", - "pin-project-lite 0.2.9", - "serde 1.0.137", + "once_cell", + "percent-encoding 2.2.0", + 
"pin-project-lite", + "serde 1.0.145", "serde_json", "serde_urlencoded", "tokio", "tokio-native-tls", - "url 2.2.2", + "tower-service", + "url 2.3.1", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "winreg 0.10.1", -] - -[[package]] -name = "resolv-conf" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" -dependencies = [ - "hostname", - "quick-error 1.2.3", + "winreg", ] [[package]] @@ -5990,12 +4796,12 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.38" +version = "0.7.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "517a3034eb2b1499714e9d1e49b2367ad567e07639b69776d35e259d9c27cca6" +checksum = "cec2b3485b07d96ddfd3134767b8a447b45ea4eb91448d0a35180ec0ffd5ed15" dependencies = [ "bytecheck", - "hashbrown 0.12.1", + "hashbrown 0.12.3", "ptr_meta", "rend", "rkyv_derive", @@ -6004,9 +4810,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.38" +version = "0.7.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505c209ee04111a006431abf39696e640838364d67a107c559ababaf6fd8c9dd" +checksum = "6eaedadc88b53e36dd32d940ed21ae4d850d5916f2581526921f553a72ac34c4" dependencies = [ "proc-macro2", "quote", @@ -6024,19 +4830,19 @@ dependencies = [ [[package]] name = "rlp" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "rustc-hex", ] [[package]] name = "rocksdb" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "620f4129485ff1a7128d184bc687470c21c7951b64779ebc9cfdad3dcd920290" +checksum = "7e9562ea1d70c0cc63a34a22d977753b50cca91cc6b6527750463bd5dd8697bc" dependencies = [ "libc", "librocksdb-sys", @@ -6060,13 +4866,13 @@ checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" [[package]] name = "rust_decimal" -version = "1.24.0" +version = "1.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2ee7337df68898256ad0d4af4aad178210d9e44d2ff900ce44064a97cd86530" +checksum = "ee9164faf726e4f3ece4978b25ca877ddc6802fa77f38cdccb32c7f805ecd70c" dependencies = [ "arrayvec 0.7.2", "num-traits 0.2.15", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -6139,11 +4945,20 @@ dependencies = [ "security-framework", ] +[[package]] +name = "rustls-pemfile" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +dependencies = [ + "base64 0.13.0", +] + [[package]] name = "rustversion" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "rusty-fork" @@ -6157,22 +4972,11 @@ dependencies = [ "wait-timeout", ] -[[package]] -name = "rw-stream-sink" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" -dependencies = [ - "futures 0.3.21", - "pin-project 0.4.29", - "static_assertions", -] - [[package]] 
name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safe-proc-macro2" @@ -6227,15 +5031,6 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" -[[package]] -name = "salsa20" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecbd2eb639fd7cab5804a0837fe373cc2172d15437e804c054a9fb885cb923b0" -dependencies = [ - "cipher", -] - [[package]] name = "same-file" version = "1.0.6" @@ -6251,8 +5046,8 @@ version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ - "lazy_static 1.4.0", - "windows-sys", + "lazy_static", + "windows-sys 0.36.1", ] [[package]] @@ -6267,6 +5062,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scratch" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" + [[package]] name = "sct" version = "0.6.1" @@ -6285,18 +5086,18 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "secp256k1" -version = "0.21.3" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +checksum = "ff55dc09d460954e9ef2fa8a7ced735a964be9981fd50e870b2b3b0705e14964" dependencies = [ "secp256k1-sys", ] [[package]] name = "secp256k1-sys" -version = "0.4.2" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +checksum = "83080e2c2fc1006e625be82e5d1eb6a43b7fd9578b617fcc55814daf286bba4b" dependencies = [ "cc", ] @@ -6371,9 +5172,9 @@ checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" [[package]] name = "serde" -version = "1.0.137" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] @@ -6384,7 +5185,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "num-traits 0.1.43", "regex", "serde 0.8.23", @@ -6399,23 +5200,23 @@ dependencies = [ "byteorder", "error", "num 0.2.1", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] name = "serde_bytes" -version = "0.11.6" +version = "0.11.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212e73464ebcde48d723aa02eb270ba62eff38a9b732df31f33f1b4e145f3a54" +checksum = "cfc50e8183eeeb6178dcb167ae34a8051d63535023ae38b5d8d12beae193d37b" dependencies = [ - "serde 1.0.137", + "serde 1.0.145", ] [[package]] name = "serde_derive" -version = "1.0.137" +version = "1.0.145" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -6424,13 +5225,13 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" dependencies = [ "itoa", "ryu", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -6440,29 +5241,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf" dependencies = [ "regex", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] name = "serde_repr" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ad84e47328a31223de7fed7a4f5087f2d6ddfe586cf3ca25b7a165bc0a5aed" +checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "serde_test" -version = "1.0.137" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe196827aea34242c314d2f0dd49ed00a129225e80dda71b0dbf65d54d25628d" -dependencies = [ - "serde 1.0.137", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -6472,7 +5264,7 @@ dependencies = [ "form_urlencoded", "itoa", "ryu", - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -6483,7 +5275,7 @@ checksum = "ef8099d3df28273c99a1728190c7a9f19d444c941044f64adf986bee7ec53051" dependencies = [ "dtoa", "linked-hash-map", - "serde 1.0.137", + "serde 1.0.145", "yaml-rust", ] @@ -6507,7 +5299,7 @@ checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpufeatures 0.2.2", + "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.0", ] @@ -6519,20 +5311,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" dependencies = [ "cfg-if 1.0.0", - "cpufeatures 0.2.2", - "digest 0.10.3", + "cpufeatures", + "digest 0.10.5", ] [[package]] -name = "sha2" -version = "0.8.2" +name = "sha1" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.5", ] [[package]] @@ -6543,20 +5334,20 @@ checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpufeatures 0.2.2", + "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.0", ] [[package]] name = "sha2" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", - "cpufeatures 0.2.2", - "digest 0.10.3", + "cpufeatures", + "digest 0.10.5", ] [[package]] @@ -6573,11 +5364,11 @@ dependencies = [ 
[[package]] name = "sha3" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a31480366ec990f395a61b7c08122d99bd40544fdb5abcfc1b06bb29994312c" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "keccak", ] @@ -6587,15 +5378,9 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", ] -[[package]] -name = "shell-escape" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" - [[package]] name = "shlex" version = "1.1.0" @@ -6623,9 +5408,9 @@ dependencies = [ [[package]] name = "signature" -version = "1.4.0" +version = "1.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "simple-error" @@ -6635,9 +5420,12 @@ checksum = "cc47a29ce97772ca5c927f75bac34866b16d64e07f330c3248e2d7226623901b" [[package]] name = "slab" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] [[package]] name = "smallvec" @@ -6650,63 +5438,18 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" - -[[package]] -name = "snow" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6142f7c25e94f6fd25a32c3348ec230df9109b463f59c8c7acc4bd34936babb7" -dependencies = [ - "aes-gcm", - "blake2 0.9.2", - "chacha20poly1305", - "rand 0.8.5", - "rand_core 0.6.3", - "ring", - "rustc_version 0.3.3", - "sha2 0.9.9", - "subtle 2.4.1", - "x25519-dalek", -] - -[[package]] -name = "socket2" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "winapi 0.3.9", -] - -[[package]] -name = "socket2" -version = "0.4.4" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" -dependencies = [ - "libc", - "winapi 0.3.9", -] +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] -name = "soketto" -version = "0.4.2" +name = "socket2" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5c71ed3d54db0a699f4948e1bb3e45b450fa31fe602621dee6680361d569c88" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" dependencies = [ - "base64 0.12.3", - "bytes 0.5.6", - "flate2", - "futures 0.3.21", - "httparse", - "log 0.4.17", - "rand 0.7.3", - "sha-1 0.9.8", + "libc", + "winapi 0.3.9", ] [[package]] @@ -6718,7 +5461,7 @@ checksum = "35391ea974fa5ee869cb094d5b437688fbf3d8127d64d1b9fed5822a1ed39b12" [[package]] name = "sparse-merkle-tree" version = 
"0.3.1-pre" -source = "git+https://github.com/heliaxdev/sparse-merkle-tree?branch=bat/arse-merkle-tree#04ad1eeb28901b57a7599bbe433b3822965dabe8" +source = "git+https://github.com/heliaxdev/sparse-merkle-tree?rev=04ad1eeb28901b57a7599bbe433b3822965dabe8#04ad1eeb28901b57a7599bbe433b3822965dabe8" dependencies = [ "blake2b-rs", "borsh", @@ -6745,25 +5488,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "stderrlog" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e5ee9b90a5452c570a0b0ac1c99ae9498db7e56e33d74366de7f2a7add7f25" -dependencies = [ - "atty", - "chrono", - "log 0.4.17", - "termcolor", - "thread_local 0.3.4", -] - -[[package]] -name = "strsim" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" - [[package]] name = "strsim" version = "0.10.0" @@ -6772,18 +5496,18 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96acfc1b70604b8b2f1ffa4c57e59176c7dbb05d556c71ecd2f5498a1dee7f8" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.0" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6878079b17446e4d3eba6192bb0a2950d5b14f0ed8424b852310e5a94345d0ef" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.0", "proc-macro2", @@ -6795,7 +5519,7 @@ dependencies = [ [[package]] name = "subproductdomain" version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" +source = "git+https://github.com/anoma/ferveo#1022ab2c7ccc689abcc05e5a08df6fb0c2a3fc65" dependencies = [ "anyhow", "ark-ec", @@ -6805,12 +5529,6 @@ dependencies = [ "ark-std", ] -[[package]] -name = "subtle" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee" - [[package]] name = "subtle" version = "2.4.1" @@ -6834,9 +5552,9 @@ checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" [[package]] name = "syn" -version = "1.0.96" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", @@ -6901,7 +5619,7 @@ dependencies = [ "cfg-if 1.0.0", "fastrand", "libc", - "redox_syscall 0.2.13", + "redox_syscall 0.2.16", "remove_dir_all", "winapi 0.3.9", ] @@ -6912,25 +5630,25 @@ version = "0.23.5" source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc37927218374f94ac8e2a19bd35bec9#95c52476bc37927218374f94ac8e2a19bd35bec9" dependencies = [ "async-trait", - "bytes 1.1.0", + "bytes 1.2.1", "ed25519", "ed25519-dalek", "flex-error", - "futures 0.3.21", + "futures 0.3.25", "num-traits 0.2.15", "once_cell", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "prost", + "prost-types", + "serde 1.0.145", "serde_bytes", "serde_json", "serde_repr", "sha2 
0.9.9", "signature", - "subtle 2.4.1", + "subtle", "subtle-encoding", "tendermint-proto 0.23.5", - "time 0.3.9", + "time 0.3.15", "zeroize", ] @@ -6940,25 +5658,25 @@ version = "0.23.6" source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" dependencies = [ "async-trait", - "bytes 1.1.0", + "bytes 1.2.1", "ed25519", "ed25519-dalek", "flex-error", - "futures 0.3.21", + "futures 0.3.25", "num-traits 0.2.15", "once_cell", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "prost", + "prost-types", + "serde 1.0.145", "serde_bytes", "serde_json", "serde_repr", "sha2 0.9.9", "signature", - "subtle 2.4.1", + "subtle", "subtle-encoding", "tendermint-proto 0.23.6", - "time 0.3.9", + "time 0.3.15", "zeroize", ] @@ -6968,11 +5686,11 @@ version = "0.23.5" source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc37927218374f94ac8e2a19bd35bec9#95c52476bc37927218374f94ac8e2a19bd35bec9" dependencies = [ "flex-error", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "tendermint 0.23.5", "toml", - "url 2.2.2", + "url 2.3.1", ] [[package]] @@ -6981,11 +5699,11 @@ version = "0.23.6" source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" dependencies = [ "flex-error", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "tendermint 0.23.6", "toml", - "url 2.2.2", + "url 2.3.1", ] [[package]] @@ -6995,10 +5713,10 @@ source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc379272183 dependencies = [ "derive_more", "flex-error", - "serde 1.0.137", + "serde 1.0.145", "tendermint 0.23.5", "tendermint-rpc 0.23.5", - "time 0.3.9", + "time 0.3.15", ] [[package]] @@ -7008,9 +5726,9 @@ source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc285 dependencies = [ "derive_more", "flex-error", - "serde 1.0.137", + "serde 1.0.145", "tendermint 0.23.6", - "time 0.3.9", + "time 0.3.15", ] [[package]] @@ -7018,16 +5736,16 @@ name = "tendermint-proto" version = "0.23.5" source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc37927218374f94ac8e2a19bd35bec9#95c52476bc37927218374f94ac8e2a19bd35bec9" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "flex-error", "num-derive", "num-traits 0.2.15", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "prost", + "prost-types", + "serde 1.0.145", "serde_bytes", "subtle-encoding", - "time 0.3.9", + "time 0.3.15", ] [[package]] @@ -7035,16 +5753,16 @@ name = "tendermint-proto" version = "0.23.6" source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "flex-error", "num-derive", "num-traits 0.2.15", - "prost 0.9.0", - "prost-types 0.9.0", - "serde 1.0.137", + "prost", + "prost-types", + "serde 1.0.145", "serde_bytes", "subtle-encoding", - "time 0.3.9", + "time 0.3.15", ] [[package]] @@ -7054,17 +5772,17 @@ source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc379272183 dependencies = [ "async-trait", "async-tungstenite", - "bytes 1.1.0", + "bytes 1.2.1", "flex-error", - "futures 0.3.21", - "getrandom 0.2.6", + "futures 0.3.25", + "getrandom 0.2.7", "http", - "hyper 0.14.19", + "hyper 0.14.20", "hyper-proxy", "hyper-rustls", "peg", - "pin-project 1.0.10", - "serde 1.0.137", + "pin-project", + "serde 1.0.145", "serde_bytes", "serde_json", "subtle-encoding", @@ 
-7072,10 +5790,10 @@ dependencies = [ "tendermint-config 0.23.5", "tendermint-proto 0.23.5", "thiserror", - "time 0.3.9", + "time 0.3.15", "tokio", - "tracing 0.1.35", - "url 2.2.2", + "tracing 0.1.37", + "url 2.3.1", "uuid", "walkdir", ] @@ -7087,17 +5805,17 @@ source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc285 dependencies = [ "async-trait", "async-tungstenite", - "bytes 1.1.0", + "bytes 1.2.1", "flex-error", - "futures 0.3.21", - "getrandom 0.2.6", + "futures 0.3.25", + "getrandom 0.2.7", "http", - "hyper 0.14.19", + "hyper 0.14.20", "hyper-proxy", "hyper-rustls", "peg", - "pin-project 1.0.10", - "serde 1.0.137", + "pin-project", + "serde 1.0.145", "serde_bytes", "serde_json", "subtle-encoding", @@ -7105,10 +5823,10 @@ dependencies = [ "tendermint-config 0.23.6", "tendermint-proto 0.23.6", "thiserror", - "time 0.3.9", + "time 0.3.15", "tokio", - "tracing 0.1.35", - "url 2.2.2", + "tracing 0.1.37", + "url 2.3.1", "uuid", "walkdir", ] @@ -7120,12 +5838,12 @@ source = "git+https://github.com/heliaxdev/tendermint-rs?rev=95c52476bc379272183 dependencies = [ "ed25519-dalek", "gumdrop", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "simple-error", "tempfile", "tendermint 0.23.5", - "time 0.3.9", + "time 0.3.15", ] [[package]] @@ -7135,22 +5853,12 @@ source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc285 dependencies = [ "ed25519-dalek", "gumdrop", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "simple-error", "tempfile", "tendermint 0.23.6", - "time 0.3.9", -] - -[[package]] -name = "term_size" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" -dependencies = [ - "libc", - "winapi 0.3.9", + "time 0.3.15", ] [[package]] @@ -7162,18 +5870,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "termion" -version = "1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "077185e2eac69c3f8379a4298e1e07cd36beb962290d4a51199acf0fdc10607e" -dependencies = [ - "libc", - "numtoa", - "redox_syscall 0.2.13", - "redox_termios", -] - [[package]] name = "termtree" version = "0.2.4" @@ -7182,25 +5878,15 @@ checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b" [[package]] name = "test-log" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4235dbf7ea878b3ef12dea20a59c134b405a66aafc4fc2c7b9935916e289e735" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" dependencies = [ "proc-macro2", "quote", "syn", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "term_size", - "unicode-width", -] - [[package]] name = "textwrap" version = "0.12.1" @@ -7232,21 +5918,22 @@ dependencies = [ [[package]] name = "thread_local" -version = "0.3.4" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" dependencies = [ - "lazy_static 0.2.11", - "unreachable", + "once_cell", ] [[package]] -name = "thread_local" -version = "1.1.4" +name = "tikv-jemalloc-sys" +version = "0.5.2+5.3.0-patched" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "ec45c14da997d0925c7835883e4d5c181f196fa142f8c19d7643d1e9af2592c3" dependencies = [ - "once_cell", + "cc", + "fs_extra", + "libc", ] [[package]] @@ -7262,9 +5949,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "d634a985c4d4238ec39cacaed2e7ae552fbd3c476b552c1deac3021b7d7eaf0c" dependencies = [ "itoa", "libc", @@ -7296,8 +5983,8 @@ dependencies = [ "ascii", "chunked_transfer", "log 0.4.17", - "time 0.3.9", - "url 2.2.2", + "time 0.3.15", + "url 2.3.1", ] [[package]] @@ -7317,20 +6004,20 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.19.2" +version = "1.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439" +checksum = "a9e03c497dc955702ba729190dc4aac6f2a0ce97f913e5b1b5912fc5039d9099" dependencies = [ - "bytes 1.1.0", + "autocfg 1.1.0", + "bytes 1.2.1", "libc", "memchr", - "mio 0.8.3", + "mio 0.8.4", "num_cpus", - "once_cell", "parking_lot 0.12.1", - "pin-project-lite 0.2.9", + "pin-project-lite", "signal-hook-registry", - "socket2 0.4.4", + "socket2", "tokio-macros", "winapi 0.3.9", ] @@ -7373,7 +6060,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" dependencies = [ - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", ] @@ -7418,7 +6105,7 @@ checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351" dependencies = [ "crossbeam-utils 0.7.2", "futures 0.1.31", - "lazy_static 1.4.0", + "lazy_static", "log 0.4.17", "mio 0.6.23", "num_cpus", @@ -7442,12 +6129,12 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" dependencies = [ "futures-core", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", ] @@ -7482,7 +6169,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53474327ae5e166530d17f2d956afcb4f8a004de581b3cae10f12006bc8163e3" dependencies = [ "async-stream", - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "tokio", "tokio-stream", @@ -7501,15 +6188,14 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.15.0" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511de3f85caf1c98983545490c3d09685fa8eb634e57eec22bb4db271f46cbd8" +checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" dependencies = [ "futures-util", "log 0.4.17", - "pin-project 1.0.10", "tokio", - "tungstenite 0.14.0", + "tungstenite 0.17.3", ] [[package]] @@ -7518,26 +6204,26 @@ version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-sink", "log 0.4.17", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", ] [[package]] name = "tokio-util" -version = "0.7.3" +version = 
"0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" +checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-sink", - "pin-project-lite 0.2.9", + "pin-project-lite", "tokio", - "tracing 0.1.35", + "tracing 0.1.37", ] [[package]] @@ -7546,7 +6232,7 @@ version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ - "serde 1.0.137", + "serde 1.0.145", ] [[package]] @@ -7558,25 +6244,25 @@ dependencies = [ "async-stream", "async-trait", "base64 0.13.0", - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-util", "h2", "http", "http-body", - "hyper 0.14.19", + "hyper 0.14.20", "hyper-timeout", - "percent-encoding 2.1.0", - "pin-project 1.0.10", - "prost 0.9.0", - "prost-derive 0.9.0", + "percent-encoding 2.2.0", + "pin-project", + "prost", + "prost-derive", "tokio", "tokio-stream", "tokio-util 0.6.10", "tower", "tower-layer", "tower-service", - "tracing 0.1.35", + "tracing 0.1.37", "tracing-futures 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -7587,30 +6273,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" dependencies = [ "proc-macro2", - "prost-build 0.9.0", + "prost-build", "quote", "syn", ] [[package]] name = "tower" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89fd63ad6adf737582df5db40d286574513c69a11dac5214dc3b5603d6713e" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", "hdrhistogram", "indexmap", - "pin-project 1.0.10", - "pin-project-lite 0.2.9", + "pin-project", + "pin-project-lite", "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.3", + "tokio-util 0.7.4", "tower-layer", "tower-service", - "tracing 0.1.35", + "tracing 0.1.37", ] [[package]] @@ -7618,10 +6304,10 @@ name = "tower-abci" version = "0.1.0" source = "git+https://github.com/heliaxdev/tower-abci?rev=f6463388fc319b6e210503b43b3aecf6faf6b200#f6463388fc319b6e210503b43b3aecf6faf6b200" dependencies = [ - "bytes 1.1.0", - "futures 0.3.21", - "pin-project 1.0.10", - "prost 0.9.0", + "bytes 1.2.1", + "futures 0.3.25", + "pin-project", + "prost", "tendermint-proto 0.23.5", "tokio", "tokio-stream", @@ -7636,10 +6322,10 @@ name = "tower-abci" version = "0.1.0" source = "git+https://github.com/heliaxdev/tower-abci.git?rev=fcc0014d0bda707109901abfa1b2f782d242f082#fcc0014d0bda707109901abfa1b2f782d242f082" dependencies = [ - "bytes 1.1.0", - "futures 0.3.21", - "pin-project 1.0.10", - "prost 0.9.0", + "bytes 1.2.1", + "futures 0.3.25", + "pin-project", + "prost", "tendermint-proto 0.23.6", "tokio", "tokio-stream", @@ -7651,9 +6337,9 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-make" @@ -7666,9 +6352,9 @@ dependencies = [ [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" @@ -7676,22 +6362,22 @@ version = "0.1.30" source = "git+https://github.com/tokio-rs/tracing/?tag=tracing-0.1.30#df4ba17d857db8ba1b553f7b293ac8ba967a42f8" dependencies = [ "cfg-if 1.0.0", - "pin-project-lite 0.2.9", + "pin-project-lite", "tracing-attributes 0.1.19", "tracing-core 0.1.22", ] [[package]] name = "tracing" -version = "0.1.35" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if 1.0.0", "log 0.4.17", - "pin-project-lite 0.2.9", - "tracing-attributes 0.1.21", - "tracing-core 0.1.27", + "pin-project-lite", + "tracing-attributes 0.1.23", + "tracing-core 0.1.30", ] [[package]] @@ -7706,9 +6392,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.21" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", @@ -7720,14 +6406,14 @@ name = "tracing-core" version = "0.1.22" source = "git+https://github.com/tokio-rs/tracing/?tag=tracing-0.1.30#df4ba17d857db8ba1b553f7b293ac8ba967a42f8" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", ] [[package]] name = "tracing-core" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ "once_cell", "valuable", @@ -7739,7 +6425,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4d7c0b83d4a500748fa5879461652b361edf5c9d51ede2a2ac03875ca185e24" dependencies = [ - "tracing 0.1.35", + "tracing 0.1.37", "tracing-subscriber 0.2.25", ] @@ -7749,8 +6435,8 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.10", - "tracing 0.1.35", + "pin-project", + "tracing 0.1.37", ] [[package]] @@ -7758,7 +6444,7 @@ name = "tracing-futures" version = "0.2.5" source = "git+https://github.com/tokio-rs/tracing/?tag=tracing-0.1.30#df4ba17d857db8ba1b553f7b293ac8ba967a42f8" dependencies = [ - "pin-project-lite 0.2.9", + "pin-project-lite", "tracing 0.1.30", ] @@ -7768,9 +6454,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "log 0.4.17", - "tracing-core 0.1.27", + "tracing-core 0.1.30", ] [[package]] @@ -7780,25 +6466,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" dependencies = [ "sharded-slab", - "thread_local 1.1.4", - "tracing-core 0.1.27", + "thread_local", + "tracing-core 0.1.30", ] [[package]] name = "tracing-subscriber" -version = "0.3.11" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" dependencies = [ - "ansi_term", - "lazy_static 1.4.0", "matchers", + "nu-ansi-term", + "once_cell", "regex", "sharded-slab", - "smallvec 1.8.0", - "thread_local 1.1.4", - "tracing 0.1.35", - "tracing-core 0.1.27", + "smallvec 1.10.0", + "thread_local", + "tracing 0.1.37", + "tracing-core 0.1.30", "tracing-log", ] @@ -7807,8 +6493,8 @@ name = "tracing-tower" version = "0.1.0" source = "git+https://github.com/tokio-rs/tracing/?tag=tracing-0.1.30#df4ba17d857db8ba1b553f7b293ac8ba967a42f8" dependencies = [ - "futures 0.3.21", - "pin-project-lite 0.2.9", + "futures 0.3.25", + "pin-project-lite", "tower-layer", "tower-make", "tower-service", @@ -7822,49 +6508,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079" -[[package]] -name = "trust-dns-proto" -version = "0.20.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca94d4e9feb6a181c690c4040d7a24ef34018d8313ac5044a61d21222ae24e31" -dependencies = [ - "async-trait", - "cfg-if 1.0.0", - "data-encoding", - "enum-as-inner", - "futures-channel", - "futures-io", - "futures-util", - "idna 0.2.3", - "ipnet", - "lazy_static 1.4.0", - "log 0.4.17", - "rand 0.8.5", - "smallvec 1.8.0", - "thiserror", - "tinyvec", - "url 2.2.2", -] - -[[package]] -name = "trust-dns-resolver" -version = "0.20.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecae383baad9995efaa34ce8e57d12c3f305e545887472a492b838f4b5cfb77a" -dependencies = [ - "cfg-if 1.0.0", - "futures-util", - "ipconfig", - "lazy_static 1.4.0", - "log 0.4.17", - "lru-cache", - "parking_lot 0.11.2", - "resolv-conf", - "smallvec 1.8.0", - "thiserror", - "trust-dns-proto", -] - [[package]] name = "try-lock" version = "0.2.3" @@ -7879,52 +6522,52 @@ checksum = "8ada8297e8d70872fa9a551d93250a9f407beb9f37ef86494eb20012a2ff7c24" dependencies = [ "base64 0.13.0", "byteorder", - "bytes 1.1.0", + "bytes 1.2.1", "http", "httparse", "input_buffer", "log 0.4.17", "rand 0.8.5", "sha-1 0.9.8", - "url 2.2.2", + "url 2.3.1", "utf-8", ] [[package]] name = "tungstenite" -version = "0.14.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b2d8558abd2e276b0a8df5c05a2ec762609344191e5fd23e292c910e9165b5" +checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" dependencies = [ "base64 0.13.0", "byteorder", - "bytes 1.1.0", + "bytes 1.2.1", "http", "httparse", "log 0.4.17", "rand 0.8.5", "sha-1 0.9.8", "thiserror", - "url 2.2.2", + "url 2.3.1", "utf-8", ] [[package]] name = "tungstenite" -version = "0.16.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" dependencies = [ "base64 0.13.0", "byteorder", - "bytes 1.1.0", + "bytes 1.2.1", "http", "httparse", "log 0.4.17", "rand 0.8.5", - "sha-1 0.9.8", + "sha-1 0.10.0", "thiserror", - "url 2.2.2", + "url 2.3.1", "utf-8", ] @@ -7951,15 +6594,15 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "ucd-trie" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" [[package]] name = "uint" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" +checksum = "a45526d29728d135c2900b0d30573fe3ee79fceb12ef534c7bb30e810a91b601" dependencies = [ "byteorder", "crunchy", @@ -7993,73 +6636,36 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] name = "unicode-ident" -version = "1.0.0" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee" +checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" - -[[package]] -name = "universal-hash" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" -dependencies = [ - "generic-array 0.14.5", - "subtle 2.4.1", -] - -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -dependencies = [ - "void", -] - -[[package]] -name = "unsigned-varint" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7fdeedbf205afadfe39ae559b75c3240f24e257d0ca27e85f85cb82aa19ac35" - -[[package]] -name = "unsigned-varint" -version = "0.7.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86a8dc7f45e4c1b0d30e43038c38f274e77af056aa5f74b93c2cf9eb3c1c836" -dependencies = [ - "asynchronous-codec", - "bytes 1.1.0", - "futures-io", - "futures-util", -] +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "untrusted" @@ -8080,14 +6686,13 @@ dependencies = [ [[package]] name = "url" -version = "2.2.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" dependencies = [ "form_urlencoded", - "idna 0.2.3", - "matches", - "percent-encoding 
2.1.0", + "idna 0.3.0", + "percent-encoding 2.2.0", ] [[package]] @@ -8108,7 +6713,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.6", + "getrandom 0.2.7", ] [[package]] @@ -8151,32 +6756,6 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" -[[package]] -name = "void" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" - -[[package]] -name = "vswhom" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" -dependencies = [ - "libc", - "vswhom-sys", -] - -[[package]] -name = "vswhom-sys" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22025f6d8eb903ebf920ea6933b70b1e495be37e2cb4099e62c80454aaf57c39" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "wait-timeout" version = "0.2.0" @@ -8215,32 +6794,33 @@ dependencies = [ [[package]] name = "warp" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cef4e1e9114a4b7f1ac799f16ce71c14de5778500c5450ec6b7b920c55b587e" +checksum = "ed7b8be92646fc3d18b06147664ebc5f48d222686cb11a8755e561a735aacc6d" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-channel", "futures-util", "headers", "http", - "hyper 0.14.19", + "hyper 0.14.20", "log 0.4.17", "mime 0.3.16", "mime_guess", "multipart", - "percent-encoding 2.1.0", - "pin-project 1.0.10", + "percent-encoding 2.2.0", + "pin-project", + "rustls-pemfile", "scoped-tls", - "serde 1.0.137", + "serde 1.0.145", "serde_json", "serde_urlencoded", "tokio", "tokio-stream", "tokio-tungstenite", - "tokio-util 0.6.10", + "tokio-util 0.7.4", "tower-service", - "tracing 0.1.35", + "tracing 0.1.37", ] [[package]] @@ -8263,9 +6843,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.80" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad" +checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -8273,13 +6853,13 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.80" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4" +checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" dependencies = [ "bumpalo", - "lazy_static 1.4.0", "log 0.4.17", + "once_cell", "proc-macro2", "quote", "syn", @@ -8288,9 +6868,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.30" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f741de44b75e14c35df886aff5f1eb73aa114fa5d4d00dcd37b5e01259bf3b2" +checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -8300,9 +6880,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.80" +version = "0.2.83" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5" +checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -8310,9 +6890,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.80" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" +checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", @@ -8323,34 +6903,19 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.80" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744" +checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" [[package]] name = "wasm-encoder" -version = "0.13.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f0c17267a5ffd6ae3d897589460e21db1673c84fb7016b909c9691369a75ea" +checksum = "c64ac98d5d61192cc45c701b7e4bd0b9aff91e2edfc7a088406cfe2288581e2c" dependencies = [ "leb128", ] -[[package]] -name = "wasm-timer" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" -dependencies = [ - "futures 0.3.21", - "js-sys", - "parking_lot 0.11.2", - "pin-utils", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - [[package]] name = "wasmer" version = "2.2.0" @@ -8398,9 +6963,9 @@ dependencies = [ "enumset", "loupe", "rkyv", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", - "smallvec 1.8.0", + "smallvec 1.10.0", "target-lexicon", "thiserror", "wasmer-types", @@ -8421,9 +6986,9 @@ dependencies = [ "loupe", "more-asserts", "rayon", - "smallvec 1.8.0", + "smallvec 1.10.0", "target-lexicon", - "tracing 0.1.35", + "tracing 0.1.37", "wasmer-compiler", "wasmer-types", "wasmer-vm", @@ -8438,11 +7003,11 @@ dependencies = [ "byteorder", "dynasm", "dynasmrt", - "lazy_static 1.4.0", + "lazy_static", "loupe", "more-asserts", "rayon", - "smallvec 1.8.0", + "smallvec 1.10.0", "wasmer-compiler", "wasmer-types", "wasmer-vm", @@ -8468,12 +7033,12 @@ checksum = "41db0ac4df90610cda8320cfd5abf90c6ec90e298b6fe5a09a81dff718b55640" dependencies = [ "backtrace", "enumset", - "lazy_static 1.4.0", + "lazy_static", "loupe", "memmap2", "more-asserts", "rustc-demangle", - "serde 1.0.137", + "serde 1.0.145", "serde_bytes", "target-lexicon", "thiserror", @@ -8494,11 +7059,11 @@ dependencies = [ "leb128", "libloading", "loupe", - "object", + "object 0.28.4", "rkyv", - "serde 1.0.137", + "serde 1.0.145", "tempfile", - "tracing 0.1.35", + "tracing 0.1.37", "wasmer-compiler", "wasmer-engine", "wasmer-object", @@ -8533,7 +7098,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d0c4005592998bd840f2289102ef9c67b6138338ed78e1fc0809586aa229040" dependencies = [ - "object", + "object 0.28.4", "thiserror", "wasmer-compiler", "wasmer-types", @@ -8548,7 +7113,7 @@ dependencies = [ "indexmap", "loupe", "rkyv", - "serde 1.0.137", + "serde 1.0.145", "thiserror", ] @@ -8569,7 +7134,7 @@ dependencies = [ "more-asserts", "region", "rkyv", - "serde 1.0.137", + "serde 1.0.145", "thiserror", "wasmer-types", "winapi 0.3.9", @@ -8589,9 +7154,9 @@ 
checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" [[package]] name = "wast" -version = "42.0.0" +version = "47.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "badcb03f976f983ff0daf294da9697be659442f61e6b0942bb37a2b6cbfe9dd4" +checksum = "02b98502f3978adea49551e801a6687678e6015317d7d9470a67fe813393f2a8" dependencies = [ "leb128", "memchr", @@ -8601,38 +7166,18 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.44" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b92f20b742ac527066c8414bc0637352661b68cab07ef42586cefaba71c965cf" +checksum = "7aab4e20c60429fbba9670a6cae0fff9520046ba0aa3e6d0b1cd2653bea14898" dependencies = [ "wast", ] -[[package]] -name = "watchexec" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a0fba7f2b9f4240dadf1c2eb4cf15359ffeb19d4f1841cb2d85a7bb4f82755f" -dependencies = [ - "clap 2.34.0", - "derive_builder", - "embed-resource", - "env_logger", - "glob", - "globset", - "lazy_static 1.4.0", - "log 0.4.17", - "nix 0.20.2", - "notify", - "walkdir", - "winapi 0.3.9", -] - [[package]] name = "web-sys" -version = "0.3.57" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283" +checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" dependencies = [ "js-sys", "wasm-bindgen", @@ -8646,12 +7191,12 @@ checksum = "41c0c0c928020760cc69884944d54e7fca43ff5d00933d0a63fd85155466fbed" dependencies = [ "awc", "clarity", - "futures 0.3.21", - "lazy_static 1.4.0", + "futures 0.3.25", + "lazy_static", "log 0.4.17", "num 0.4.0", "num256", - "serde 1.0.137", + "serde 1.0.145", "serde_derive", "serde_json", "tokio", @@ -8678,9 +7223,9 @@ dependencies = [ [[package]] name = "websocket" -version = "0.26.4" +version = "0.26.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0e2836502b48713d4e391e7e016df529d46e269878fe5d961b15a1fd6417f1a" +checksum = "92aacab060eea423e4036820ddd28f3f9003b2c4d8048cbda985e5a14e18038d" dependencies = [ "bytes 0.4.12", "futures 0.1.31", @@ -8699,9 +7244,9 @@ dependencies = [ [[package]] name = "websocket-base" -version = "0.26.2" +version = "0.26.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "403f3fd505ff930da84156389639932955fb09705b3dccd1a3d60c8e7ff62776" +checksum = "49aec794b07318993d1db16156d5a9c750120597a5ee40c6b928d416186cb138" dependencies = [ "base64 0.10.1", "bitflags", @@ -8728,21 +7273,15 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static 1.4.0", "libc", + "once_cell", ] -[[package]] -name = "widestring" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" - [[package]] name = "winapi" version = "0.2.8" @@ -8812,6 +7351,27 @@ dependencies = [ "windows_x86_64_msvc 0.36.1", ] +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + 
"windows_aarch64_gnullvm", + "windows_aarch64_msvc 0.42.0", + "windows_i686_gnu 0.42.0", + "windows_i686_msvc 0.42.0", + "windows_x86_64_gnu 0.42.0", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc 0.42.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" + [[package]] name = "windows_aarch64_msvc" version = "0.29.0" @@ -8824,6 +7384,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" + [[package]] name = "windows_i686_gnu" version = "0.29.0" @@ -8836,6 +7402,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +[[package]] +name = "windows_i686_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" + [[package]] name = "windows_i686_msvc" version = "0.29.0" @@ -8848,6 +7420,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +[[package]] +name = "windows_i686_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" + [[package]] name = "windows_x86_64_gnu" version = "0.29.0" @@ -8860,6 +7438,18 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" + [[package]] name = "windows_x86_64_msvc" version = "0.29.0" @@ -8873,13 +7463,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" [[package]] -name = "winreg" -version = "0.6.2" +name = "windows_x86_64_msvc" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2986deb581c4fe11b621998a5e53361efe6b48a151178d0cd9eeffa4dc6acc9" -dependencies = [ - "winapi 0.3.9", -] +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" [[package]] name = "winreg" @@ -8909,17 +7496,6 @@ dependencies = [ "tap", ] -[[package]] -name = "x25519-dalek" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" -dependencies = [ - "curve25519-dalek", - "rand_core 0.5.1", - "zeroize", -] - [[package]] name = "xattr" version = "0.2.3" @@ -8938,25 +7514,11 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "yamux" -version = "0.9.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d9028f208dd5e63c614be69f115c1b53cacc1111437d4c765185856666c107" -dependencies = [ - "futures 0.3.21", - "log 0.4.17", - "nohash-hasher", - "parking_lot 0.11.2", - "rand 0.8.5", - "static_assertions", -] - [[package]] name = "zeroize" -version = "1.5.5" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94693807d016b2f2d2e14420eb3bfcca689311ff775dcf113d74ea624b7cdf07" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" dependencies = [ "zeroize_derive", ] @@ -8975,18 +7537,18 @@ dependencies = [ [[package]] name = "zstd" -version = "0.10.2+zstd.1.5.2" +version = "0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4a6bd64f22b5e3e94b4e238669ff9f10815c27a5180108b849d24174a83847" +checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "4.1.6+zstd.1.5.2" +version = "5.0.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b61c51bb270702d6167b8ce67340d2754b088d0c091b06e593aa772c3ee9bb" +checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" dependencies = [ "libc", "zstd-sys", @@ -8994,9 +7556,9 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "1.6.3+zstd.1.5.2" +version = "2.0.1+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc49afa5c8d634e75761feda8c592051e7eeb4683ba827211eb0d731d3402ea8" +checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b" dependencies = [ "cc", "libc", diff --git a/Cargo.toml b/Cargo.toml index 5c499bf6d7..8e242cb0b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,13 +20,15 @@ exclude = [ ] [patch.crates-io] -# TODO backported patch in the noise protocl for , blocked on libp2p upgrade -libp2p = {git = "https://github.com/heliaxdev/rust-libp2p.git", rev = "1abe349c231eb307d3dbe03f3ffffc6cf5e9084d"} # TODO temp patch for , and more tba. 
borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} borsh-derive = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} +# The following 3 crates patch a work-around for https://github.com/smol-rs/polling/issues/38 breaking namada tooling build with nightly 2022-05-20 +polling = {git = "https://github.com/heliaxdev/polling.git", rev = "02a655775282879459a3460e2646b60c005bca2c"} +async-io = {git = "https://github.com/heliaxdev/async-io.git", rev = "9285dad39c9a37ecd0dbd498c5ce5b0e65b02489"} +async-process = {git = "https://github.com/heliaxdev/async-process.git", rev = "e42c527e87d937da9e01aaeb563c0b948580dc89"} # borsh = {path = "../borsh-rs/borsh"} # borsh-derive = {path = "../borsh-rs/borsh-derive"} # borsh-derive-internal = {path = "../borsh-rs/borsh-derive-internal"} @@ -39,6 +41,7 @@ tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev tendermint-rpc = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} tendermint-testgen = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} tendermint-light-client-verifier = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} +expectrl = {git = "https://github.com/james-chf/expectrl.git", branch = "james/raw-logger"} # patched to a commit on the `eth-bridge-integration` branch of our fork ibc = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} diff --git a/Makefile b/Makefile index a6d93a25e8..b2d0393df9 100644 --- a/Makefile +++ b/Makefile @@ -25,6 +25,9 @@ build-test: build-release: ANOMA_DEV=false $(cargo) build --release --package namada_apps --manifest-path Cargo.toml +install-release: + ANOMA_DEV=false $(cargo) install --path ./apps --locked + check-release: ANOMA_DEV=false $(cargo) check --release --package namada_apps @@ -177,12 +180,21 @@ build-wasm-image-docker: build-wasm-scripts-docker: build-wasm-image-docker docker run --rm -v ${PWD}:/__w/namada/namada namada-wasm make build-wasm-scripts -# Build the validity predicate, transactions, matchmaker and matchmaker filter wasm +debug-wasm-scripts-docker: build-wasm-image-docker + docker run --rm -v ${PWD}:/usr/local/rust/wasm anoma-wasm make debug-wasm-scripts + +# Build the validity predicate and transactions wasm build-wasm-scripts: make -C $(wasms) make opt-wasm make checksum-wasm +# Debug build the validity predicate, transactions, matchmaker and matchmaker filter wasm +debug-wasm-scripts: + make -C $(wasms) debug + make opt-wasm + make checksum-wasm + # need python checksum-wasm: python3 wasm/checksums.py @@ -206,4 +218,4 @@ test-miri: MIRIFLAGS="-Zmiri-disable-isolation" $(cargo) +$(nightly) miri test -.PHONY : build check build-release clippy install run-ledger run-gossip reset-ledger test test-debug fmt watch clean build-doc doc build-wasm-scripts-docker build-wasm-scripts clean-wasm-scripts dev-deps test-miri test-unit test-unit-abcipp clippy-abcipp +.PHONY : build check build-release clippy install run-ledger run-gossip reset-ledger test test-debug fmt watch clean build-doc doc 
build-wasm-scripts-docker debug-wasm-scripts-docker build-wasm-scripts debug-wasm-scripts clean-wasm-scripts dev-deps test-miri test-unit test-unit-abcipp clippy-abcipp diff --git a/README.md b/README.md index 323bc5e9b8..6888f89e87 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,8 @@ ## Overview [Namada](http://namada.net) is a sovereign proof-of-stake blockchain, using Tendermint BFT -consensus, that enables multi-asset private transfers for any native -or non-native asset using a multi-asset shielded pool derived from -the Sapling circuit. Namada features full IBC protocol support, +consensus, that enables multi-asset shielded transfers for any native +or non-native asset. Namada features full IBC protocol support, a natively integrated Ethereum bridge, a modern proof-of-stake system with automatic reward compounding and cubic slashing, and a stake-weighted governance signalling mechanism. Users of shielded @@ -20,9 +19,9 @@ interaction with the protocol. ## 📓 Docs -- user docs: built from [docs mdBook](./documentation/docs/) -- dev docs: built from [dev mdBook](./documentation/dev/) -- specifications: built from [specs mdBook](./documentation/specs/) +* user docs: built from [docs mdBook](./documentation/docs/) +* dev docs: built from [dev mdBook](./documentation/dev/) +* specifications: built from [specs mdBook](./documentation/specs/) ## Warning @@ -47,7 +46,7 @@ Guide. ## ⚙️ Development ```shell -# Build the provided validity predicate, transaction and matchmaker wasm modules +# Build the provided validity predicate and transaction wasm modules make build-wasm-scripts-docker # Development (debug) build Anoma, which includes a validator and some default @@ -69,11 +68,11 @@ make clippy To change the log level, set `ANOMA_LOG` environment variable to one of: -- `error` -- `warn` -- `info` -- `debug` -- `trace` +* `error` +* `warn` +* `info` +* `debug` +* `trace` The default is set to `info` for all the modules, expect for Tendermint ABCI, which has a lot of `debug` logging. 
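A minimal usage sketch of the log-level override described in the README hunk above, assuming the `namadan` node binary name and the `ledger run` subcommand wired up elsewhere in this diff (the exact invocation is illustrative, not part of this change):

```shell
# Hypothetical invocation: run the ledger node with verbose logging.
# ANOMA_LOG overrides the default `info` level for all modules.
ANOMA_LOG=debug namadan ledger run

# Quieter run, surfacing only warnings and errors.
ANOMA_LOG=warn namadan ledger run
```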
diff --git a/apps/Cargo.toml b/apps/Cargo.toml index fc22eb7040..3aaa849ade 100644 --- a/apps/Cargo.toml +++ b/apps/Cargo.toml @@ -6,7 +6,8 @@ license = "GPL-3.0" name = "namada_apps" readme = "../README.md" resolver = "2" -version = "0.7.1" +version = "0.8.1" +default-run = "namada" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -64,9 +65,10 @@ abciplus = [ [dependencies] namada = {path = "../shared", features = ["wasm-runtime", "ferveo-tpke", "rand", "secp256k1-sign-verify"]} -arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", branch = "bat/arse-merkle-tree", features = ["std", "borsh"]} ark-serialize = "0.3.0" ark-std = "0.3.0" +# branch = "bat/arse-merkle-tree" +arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", rev = "04ad1eeb28901b57a7599bbe433b3822965dabe8", features = ["std", "borsh"]} async-std = {version = "=1.11.0", features = ["unstable"]} async-trait = "0.1.51" base64 = "0.13.0" @@ -81,8 +83,8 @@ clap = {git = "https://github.com/clap-rs/clap/", tag = "v3.0.0-beta.2", default clarity = "0.5.1" color-eyre = "0.5.10" config = "0.11.0" +data-encoding = "2.3.2" derivative = "2.2.0" -directories = "4.0.1" ed25519-consensus = "1.2.0" ethabi = "17.0.0" ferveo = {git = "https://github.com/anoma/ferveo"} @@ -91,11 +93,9 @@ eyre = "0.6.5" flate2 = "1.0.22" file-lock = "2.0.2" futures = "0.3" -hex = "0.4.3" itertools = "0.10.1" libc = "0.2.97" libloading = "0.7.2" -libp2p = "0.38.0" message-io = {version = "0.14.3", default-features = false, features = ["websocket"]} num256 = "0.3.5" num-derive = "0.3.3" @@ -103,7 +103,6 @@ num-traits = "0.2.14" num_cpus = "1.13.0" once_cell = "1.8.0" orion = "0.16.0" -pathdiff = "0.2.1" prost = "0.9.0" prost-types = "0.9.0" rand = {version = "0.8", default-features = false} @@ -112,7 +111,7 @@ rayon = "=1.5.1" regex = "1.4.5" reqwest = "0.11.4" rlimit = "0.5.4" -rocksdb = {version = "0.18.0", features = ['zstd'], default-features = false} +rocksdb = {version = "0.19.0", features = ['zstd', 'jemalloc'], default-features = false} rpassword = "5.0.1" serde = {version = "1.0.125", features = ["derive"]} serde_bytes = "0.11.5" @@ -148,6 +147,7 @@ tracing-subscriber = {version = "0.3.7", features = ["env-filter"]} web30 = "0.19.1" websocket = "0.26.2" winapi = "0.3.9" +bimap = {version = "0.6.2", features = ["serde"]} warp = "0.3.2" bytes = "1.1.0" @@ -155,7 +155,6 @@ bytes = "1.1.0" [dev-dependencies] assert_matches = "1.5.0" namada = {path = "../shared", features = ["testing", "wasm-runtime"]} -cargo-watch = "7.5.0" bit-set = "0.5.2" # A fork with state machime testing proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"} @@ -165,4 +164,3 @@ tokio-test = "0.4.2" [build-dependencies] git2 = "0.13.25" -tonic-build = "0.6.0" diff --git a/apps/build.rs b/apps/build.rs index ae49503e78..32f7c57e87 100644 --- a/apps/build.rs +++ b/apps/build.rs @@ -1,6 +1,5 @@ -use std::fs::{read_to_string, File}; +use std::fs::File; use std::io::Write; -use std::process::Command; use std::{env, str}; use git2::{DescribeFormatOptions, DescribeOptions, Repository}; @@ -8,9 +7,6 @@ use git2::{DescribeFormatOptions, DescribeOptions, Repository}; /// Path to the .proto source files, relative to `apps` directory const PROTO_SRC: &str = "./proto"; -/// The version should match the one we use in the `Makefile` -const RUSTFMT_TOOLCHAIN_SRC: &str = "../rust-nightly-version"; - fn main() { // 
Discover the repository version, if it exists println!("cargo:rerun-if-changed=../.git"); @@ -66,43 +62,4 @@ fn main() { println!("cargo:rustc-cfg=feature=\"dev\""); } } - - let mut use_rustfmt = false; - - // The version should match the one we use in the `Makefile` - if let Ok(rustfmt_toolchain) = read_to_string(RUSTFMT_TOOLCHAIN_SRC) { - // Try to find the path to rustfmt. - if let Ok(output) = Command::new("rustup") - .args(&[ - "which", - "rustfmt", - "--toolchain", - rustfmt_toolchain.trim(), - ]) - .output() - { - if let Ok(rustfmt) = str::from_utf8(&output.stdout) { - // Set the command to be used by tonic_build below to format the - // generated files - let rustfmt = rustfmt.trim(); - if !rustfmt.is_empty() { - println!("using rustfmt from path \"{}\"", rustfmt); - env::set_var("RUSTFMT", rustfmt); - use_rustfmt = true - } - } - } - } - - tonic_build::configure() - .out_dir("src/lib/proto/generated") - .format(use_rustfmt) - .extern_path(".types", "::namada::proto::generated::types") - // This warning appears in tonic generated code - .server_mod_attribute(".", "#[allow(clippy::unit_arg)]") - // TODO try to add json encoding to simplify use for user - // .type_attribute("types.Intent", "#[derive(serde::Serialize, - // serde::Deserialize)]") - .compile(&[format!("{}/services.proto", PROTO_SRC)], &[PROTO_SRC]) - .unwrap(); } diff --git a/apps/src/bin/anoma-client/cli.rs b/apps/src/bin/anoma-client/cli.rs index d04eeff9ab..b87cdb5c66 100644 --- a/apps/src/bin/anoma-client/cli.rs +++ b/apps/src/bin/anoma-client/cli.rs @@ -3,10 +3,10 @@ use color_eyre::eyre::Result; use namada_apps::cli; use namada_apps::cli::cmds::*; -use namada_apps::client::{gossip, rpc, tx, utils}; +use namada_apps::client::{rpc, tx, utils}; pub async fn main() -> Result<()> { - match cli::anoma_client_cli() { + match cli::anoma_client_cli()? { cli::AnomaClient::WithContext(cmd_box) => { let (cmd, ctx) = *cmd_box; use AnomaClientWithContext as Sub; @@ -80,13 +80,6 @@ pub async fn main() -> Result<()> { Sub::QueryProtocolParameters(QueryProtocolParameters(args)) => { rpc::query_protocol_parameters(ctx, args).await; } - // Gossip cmds - Sub::Intent(Intent(args)) => { - gossip::gossip_intent(ctx, args).await; - } - Sub::SubscribeTopic(SubscribeTopic(args)) => { - gossip::subscribe_topic(ctx, args).await; - } } } cli::AnomaClient::WithoutContext(cmd, global_args) => match cmd { @@ -94,6 +87,9 @@ pub async fn main() -> Result<()> { Utils::JoinNetwork(JoinNetwork(args)) => { utils::join_network(global_args, args).await } + Utils::FetchWasms(FetchWasms(args)) => { + utils::fetch_wasms(global_args, args).await + } Utils::InitNetwork(InitNetwork(args)) => { utils::init_network(global_args, args) } diff --git a/apps/src/bin/anoma-node/cli.rs b/apps/src/bin/anoma-node/cli.rs index 407a6b7378..d2ce7b608a 100644 --- a/apps/src/bin/anoma-node/cli.rs +++ b/apps/src/bin/anoma-node/cli.rs @@ -1,11 +1,11 @@ //! Anoma node CLI. 
use eyre::{Context, Result}; -use namada_apps::cli::{self, args, cmds}; -use namada_apps::node::{gossip, ledger, matchmaker}; +use namada_apps::cli::{self, cmds}; +use namada_apps::node::ledger; pub fn main() -> Result<()> { - let (cmd, mut ctx) = cli::anoma_node_cli(); + let (cmd, mut ctx) = cli::anoma_node_cli()?; if let Some(mode) = ctx.global_args.mode.clone() { ctx.config.ledger.tendermint.tendermint_mode = mode; } @@ -20,55 +20,6 @@ pub fn main() -> Result<()> { .wrap_err("Failed to reset Anoma node")?; } }, - cmds::AnomaNode::Gossip(sub) => match sub { - cmds::Gossip::Run(cmds::GossipRun(args::GossipRun { - addr, - rpc, - })) => { - let config = ctx.config; - let mut gossip_cfg = config.intent_gossiper; - gossip_cfg.update(addr, rpc); - gossip::run( - gossip_cfg, - &config - .ledger - .shell - .base_dir - .join(ctx.global_config.default_chain_id.as_str()), - ) - .wrap_err("Failed to run gossip service")?; - } - }, - cmds::AnomaNode::Matchmaker(cmds::Matchmaker(args::Matchmaker { - intent_gossiper_addr, - matchmaker_path, - tx_code_path, - ledger_addr, - tx_signing_key, - tx_source_address, - })) => { - let tx_signing_key = ctx.get_cached(&tx_signing_key); - let tx_source_address = ctx.get(&tx_source_address); - - let wasm_dir = ctx.wasm_dir(); - let config = ctx.config; - let mut mm_config = config.matchmaker; - if matchmaker_path.is_some() { - mm_config.matchmaker_path = matchmaker_path; - } - if tx_code_path.is_some() { - mm_config.tx_code_path = tx_code_path; - } - - matchmaker::run( - mm_config, - intent_gossiper_addr, - ledger_addr, - tx_signing_key, - tx_source_address, - wasm_dir, - ); - } cmds::AnomaNode::Config(sub) => match sub { cmds::Config::Gen(cmds::ConfigGen) => { // If the config doesn't exit, it gets generated in the context. diff --git a/apps/src/bin/anoma-wallet/cli.rs b/apps/src/bin/anoma-wallet/cli.rs index 3889489956..6bab0b19a4 100644 --- a/apps/src/bin/anoma-wallet/cli.rs +++ b/apps/src/bin/anoma-wallet/cli.rs @@ -12,7 +12,7 @@ use namada_apps::cli::{args, cmds, Context}; use namada_apps::wallet::DecryptionError; pub fn main() -> Result<()> { - let (cmd, ctx) = cli::anoma_wallet_cli(); + let (cmd, ctx) = cli::anoma_wallet_cli()?; match cmd { cmds::AnomaWallet::Key(sub) => match sub { cmds::WalletKey::Gen(cmds::KeyGen(args)) => { @@ -28,8 +28,8 @@ pub fn main() -> Result<()> { cmds::WalletAddress::Gen(cmds::AddressGen(args)) => { key_and_address_gen(ctx, args) } - cmds::WalletAddress::Find(cmds::AddressFind(args)) => { - address_find(ctx, args) + cmds::WalletAddress::Find(cmds::AddressOrAliasFind(args)) => { + address_or_alias_find(ctx, args) } cmds::WalletAddress::List(cmds::AddressList) => address_list(ctx), cmds::WalletAddress::Add(cmds::AddressAdd(args)) => { @@ -191,17 +191,36 @@ fn address_list(ctx: Context) { } } -/// Find address by its alias. -fn address_find(ctx: Context, args: args::AddressFind) { +/// Find address (alias) by its alias (address). +fn address_or_alias_find(ctx: Context, args: args::AddressOrAliasFind) { let wallet = ctx.wallet; - if let Some(address) = wallet.find_address(&args.alias) { - println!("Found address {}", address.to_pretty_string()); - } else { - println!( - "No address with alias {} found. Use the command `address list` \ - to see all the known addresses.", - args.alias.to_lowercase() + if args.address.is_some() && args.alias.is_some() { + panic!( + "This should not be happening: clap should emit its own error \ + message." 
); + } else if args.alias.is_some() { + if let Some(address) = + wallet.find_address(&args.alias.as_ref().unwrap()) + { + println!("Found address {}", address.to_pretty_string()); + } else { + println!( + "No address with alias {} found. Use the command `address \ + list` to see all the known addresses.", + args.alias.unwrap().to_lowercase() + ); + } + } else if args.address.is_some() { + if let Some(alias) = wallet.find_alias(args.address.as_ref().unwrap()) { + println!("Found alias {}", alias); + } else { + println!( + "No alias with address {} found. Use the command `address \ + list` to see all the known addresses.", + args.address.unwrap() + ); + } } } diff --git a/apps/src/bin/anoma/cli.rs b/apps/src/bin/anoma/cli.rs index b0ed783276..ccde0c3618 100644 --- a/apps/src/bin/anoma/cli.rs +++ b/apps/src/bin/anoma/cli.rs @@ -39,10 +39,7 @@ fn handle_command(cmd: cli::cmds::Anoma, raw_sub_cmd: String) -> Result<()> { } match cmd { - cli::cmds::Anoma::Node(_) - | cli::cmds::Anoma::Ledger(_) - | cli::cmds::Anoma::Gossip(_) - | cli::cmds::Anoma::Matchmaker(_) => { + cli::cmds::Anoma::Node(_) | cli::cmds::Anoma::Ledger(_) => { handle_subcommand("namadan", sub_args) } cli::cmds::Anoma::Client(_) @@ -52,8 +49,9 @@ fn handle_command(cmd: cli::cmds::Anoma, raw_sub_cmd: String) -> Result<()> { | cli::cmds::Anoma::TxInitNft(_) | cli::cmds::Anoma::TxMintNft(_) | cli::cmds::Anoma::TxInitProposal(_) - | cli::cmds::Anoma::TxVoteProposal(_) - | cli::cmds::Anoma::Intent(_) => handle_subcommand("namadac", sub_args), + | cli::cmds::Anoma::TxVoteProposal(_) => { + handle_subcommand("namadac", sub_args) + } cli::cmds::Anoma::Wallet(_) => handle_subcommand("namadaw", sub_args), } } diff --git a/apps/src/lib/cli.rs b/apps/src/lib/cli.rs index 59178da8f1..ad90f1d2cb 100644 --- a/apps/src/lib/cli.rs +++ b/apps/src/lib/cli.rs @@ -9,7 +9,8 @@ pub mod context; mod utils; -use clap::{crate_authors, AppSettings, ArgMatches}; +use clap::{AppSettings, ArgGroup, ArgMatches}; +use color_eyre::eyre::Result; pub use utils::safe_exit; use utils::*; @@ -17,7 +18,7 @@ pub use self::context::Context; include!("../../version.rs"); -const APP_NAME: &str = "Anoma"; +const APP_NAME: &str = "Namada"; // Main Anoma sub-commands const NODE_CMD: &str = "node"; @@ -41,8 +42,6 @@ pub mod cmds { // Inlined commands from the node. Ledger(Ledger), - Gossip(Gossip), - Matchmaker(Matchmaker), // Inlined commands from the client. 
TxCustom(TxCustom), @@ -52,7 +51,6 @@ pub mod cmds { TxMintNft(TxMintNft), TxInitProposal(TxInitProposal), TxVoteProposal(TxVoteProposal), - Intent(Intent), } impl Cmd for Anoma { @@ -61,8 +59,6 @@ pub mod cmds { .subcommand(AnomaClient::def()) .subcommand(AnomaWallet::def()) .subcommand(Ledger::def()) - .subcommand(Gossip::def()) - .subcommand(Matchmaker::def()) .subcommand(TxCustom::def()) .subcommand(TxTransfer::def()) .subcommand(TxUpdateVp::def()) @@ -70,7 +66,6 @@ pub mod cmds { .subcommand(TxMintNft::def()) .subcommand(TxInitProposal::def()) .subcommand(TxVoteProposal::def()) - .subcommand(Intent::def()) } fn parse(matches: &ArgMatches) -> Option { @@ -78,8 +73,6 @@ pub mod cmds { let client = SubCmd::parse(matches).map(Self::Client); let wallet = SubCmd::parse(matches).map(Self::Wallet); let ledger = SubCmd::parse(matches).map(Self::Ledger); - let gossip = SubCmd::parse(matches).map(Self::Gossip); - let matchmaker = SubCmd::parse(matches).map(Self::Matchmaker); let tx_custom = SubCmd::parse(matches).map(Self::TxCustom); let tx_transfer = SubCmd::parse(matches).map(Self::TxTransfer); let tx_update_vp = SubCmd::parse(matches).map(Self::TxUpdateVp); @@ -89,12 +82,9 @@ pub mod cmds { SubCmd::parse(matches).map(Self::TxInitProposal); let tx_vote_proposal = SubCmd::parse(matches).map(Self::TxVoteProposal); - let intent = SubCmd::parse(matches).map(Self::Intent); node.or(client) .or(wallet) .or(ledger) - .or(gossip) - .or(matchmaker) .or(tx_custom) .or(tx_transfer) .or(tx_update_vp) @@ -102,7 +92,6 @@ pub mod cmds { .or(tx_nft_mint) .or(tx_init_proposal) .or(tx_vote_proposal) - .or(intent) } } @@ -112,25 +101,18 @@ pub mod cmds { #[allow(clippy::large_enum_variant)] pub enum AnomaNode { Ledger(Ledger), - Gossip(Gossip), - Matchmaker(Matchmaker), Config(Config), } impl Cmd for AnomaNode { fn add_sub(app: App) -> App { - app.subcommand(Ledger::def()) - .subcommand(Gossip::def()) - .subcommand(Matchmaker::def()) - .subcommand(Config::def()) + app.subcommand(Ledger::def()).subcommand(Config::def()) } fn parse(matches: &ArgMatches) -> Option { let ledger = SubCmd::parse(matches).map(Self::Ledger); - let gossip = SubCmd::parse(matches).map(Self::Gossip); - let matchmaker = SubCmd::parse(matches).map(Self::Matchmaker); let config = SubCmd::parse(matches).map(Self::Config); - ledger.or(gossip).or(matchmaker).or(config) + ledger.or(config) } } impl SubCmd for AnomaNode { @@ -194,9 +176,6 @@ pub mod cmds { .subcommand(QueryProposal::def().display_order(3)) .subcommand(QueryProposalResult::def().display_order(3)) .subcommand(QueryProtocolParameters::def().display_order(3)) - // Intents - .subcommand(Intent::def().display_order(4)) - .subcommand(SubscribeTopic::def().display_order(4)) // Utils .subcommand(Utils::def().display_order(5)) } @@ -231,8 +210,6 @@ pub mod cmds { Self::parse_with_ctx(matches, QueryProposalResult); let query_protocol_parameters = Self::parse_with_ctx(matches, QueryProtocolParameters); - let intent = Self::parse_with_ctx(matches, Intent); - let subscribe_topic = Self::parse_with_ctx(matches, SubscribeTopic); let utils = SubCmd::parse(matches).map(Self::WithoutContext); tx_custom .or(tx_transfer) @@ -256,8 +233,6 @@ pub mod cmds { .or(query_proposal) .or(query_proposal_result) .or(query_protocol_parameters) - .or(intent) - .or(subscribe_topic) .or(utils) } } @@ -316,9 +291,6 @@ pub mod cmds { QueryProposal(QueryProposal), QueryProposalResult(QueryProposalResult), QueryProtocolParameters(QueryProtocolParameters), - // Gossip cmds - Intent(Intent), - 
SubscribeTopic(SubscribeTopic), } #[derive(Clone, Debug)] @@ -480,7 +452,7 @@ pub mod cmds { #[derive(Clone, Debug)] pub enum WalletAddress { Gen(AddressGen), - Find(AddressFind), + Find(AddressOrAliasFind), List(AddressList), Add(AddressAdd), } @@ -506,7 +478,7 @@ pub mod cmds { ) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand(AddressGen::def()) - .subcommand(AddressFind::def()) + .subcommand(AddressOrAliasFind::def()) .subcommand(AddressList::def()) .subcommand(AddressAdd::def()) } @@ -538,21 +510,23 @@ pub mod cmds { /// Find an address by its alias #[derive(Clone, Debug)] - pub struct AddressFind(pub args::AddressFind); + pub struct AddressOrAliasFind(pub args::AddressOrAliasFind); - impl SubCmd for AddressFind { + impl SubCmd for AddressOrAliasFind { const CMD: &'static str = "find"; fn parse(matches: &ArgMatches) -> Option { - matches - .subcommand_matches(Self::CMD) - .map(|matches| AddressFind(args::AddressFind::parse(matches))) + matches.subcommand_matches(Self::CMD).map(|matches| { + AddressOrAliasFind(args::AddressOrAliasFind::parse(matches)) + }) } fn def() -> App { App::new(Self::CMD) - .about("Find an address by its alias.") - .add_args::() + .about( + "Find an address by its alias or an alias by its address.", + ) + .add_args::() } } @@ -657,76 +631,6 @@ pub mod cmds { } } - #[derive(Clone, Debug)] - pub enum Gossip { - Run(GossipRun), - } - - impl SubCmd for Gossip { - const CMD: &'static str = "gossip"; - - fn parse(matches: &ArgMatches) -> Option { - matches.subcommand_matches(Self::CMD).and_then(|matches| { - let run = SubCmd::parse(matches).map(Gossip::Run); - run - // The `run` command is the default if no sub-command given - .or_else(|| { - Some(Gossip::Run(GossipRun(args::GossipRun::parse( - matches, - )))) - }) - }) - } - - fn def() -> App { - App::new(Self::CMD) - .about( - "Gossip node sub-commands. 
If no sub-command specified, \ - defaults to run the node.", - ) - .subcommand(GossipRun::def()) - .add_args::() - } - } - - #[derive(Clone, Debug)] - pub struct Matchmaker(pub args::Matchmaker); - - impl SubCmd for Matchmaker { - const CMD: &'static str = "matchmaker"; - - fn parse(matches: &ArgMatches) -> Option { - matches - .subcommand_matches(Self::CMD) - .map(|matches| Matchmaker(args::Matchmaker::parse(matches))) - } - - fn def() -> App { - App::new(Self::CMD) - .about("Run a matchmaker.") - .add_args::() - } - } - - #[derive(Clone, Debug)] - pub struct GossipRun(pub args::GossipRun); - - impl SubCmd for GossipRun { - const CMD: &'static str = "run"; - - fn parse(matches: &ArgMatches) -> Option { - matches - .subcommand_matches(Self::CMD) - .map(|matches| GossipRun(args::GossipRun::parse(matches))) - } - - fn def() -> App { - App::new(Self::CMD) - .about("Run a gossip node.") - .add_args::() - } - } - #[derive(Clone, Debug)] pub enum Config { Gen(ConfigGen), @@ -1218,50 +1122,10 @@ pub mod cmds { } } - #[derive(Clone, Debug)] - pub struct Intent(pub args::Intent); - - impl SubCmd for Intent { - const CMD: &'static str = "intent"; - - fn parse(matches: &ArgMatches) -> Option { - matches - .subcommand_matches(Self::CMD) - .map(|matches| Intent(args::Intent::parse(matches))) - } - - fn def() -> App { - App::new(Self::CMD) - .about("Send an intent.") - .add_args::() - } - } - - #[derive(Clone, Debug)] - pub struct SubscribeTopic(pub args::SubscribeTopic); - - impl SubCmd for SubscribeTopic { - const CMD: &'static str = "subscribe-topic"; - - fn parse(matches: &ArgMatches) -> Option { - matches.subcommand_matches(Self::CMD).map(|matches| { - SubscribeTopic(args::SubscribeTopic::parse(matches)) - }) - } - - fn def() -> App { - App::new(Self::CMD) - .about( - "Subscribe intent gossip node with a matchmaker to a \ - topic.", - ) - .add_args::() - } - } - #[derive(Clone, Debug)] pub enum Utils { JoinNetwork(JoinNetwork), + FetchWasms(FetchWasms), InitNetwork(InitNetwork), InitGenesisValidator(InitGenesisValidator), } @@ -1273,11 +1137,15 @@ pub mod cmds { matches.subcommand_matches(Self::CMD).and_then(|matches| { let join_network = SubCmd::parse(matches).map(Self::JoinNetwork); + let fetch_wasms = SubCmd::parse(matches).map(Self::FetchWasms); let init_network = SubCmd::parse(matches).map(Self::InitNetwork); let init_genesis = SubCmd::parse(matches).map(Self::InitGenesisValidator); - join_network.or(init_network).or(init_genesis) + join_network + .or(fetch_wasms) + .or(init_network) + .or(init_genesis) }) } @@ -1285,6 +1153,7 @@ pub mod cmds { App::new(Self::CMD) .about("Utilities.") .subcommand(JoinNetwork::def()) + .subcommand(FetchWasms::def()) .subcommand(InitNetwork::def()) .subcommand(InitGenesisValidator::def()) .setting(AppSettings::SubcommandRequiredElseHelp) @@ -1310,6 +1179,25 @@ pub mod cmds { } } + #[derive(Clone, Debug)] + pub struct FetchWasms(pub args::FetchWasms); + + impl SubCmd for FetchWasms { + const CMD: &'static str = "fetch-wasms"; + + fn parse(matches: &ArgMatches) -> Option { + matches + .subcommand_matches(Self::CMD) + .map(|matches| Self(args::FetchWasms::parse(matches))) + } + + fn def() -> App { + App::new(Self::CMD) + .about("Ensure pre-built wasms are present") + .add_args::() + } + } + #[derive(Clone, Debug)] pub struct InitNetwork(pub args::InitNetwork); @@ -1355,27 +1243,22 @@ pub mod cmds { pub mod args { - use std::convert::TryFrom; use std::env; - use std::fs::File; use std::net::SocketAddr; use std::path::PathBuf; use std::str::FromStr; - use 
libp2p::Multiaddr; use namada::types::address::Address; use namada::types::chain::{ChainId, ChainIdPrefix}; use namada::types::governance::ProposalVote; - use namada::types::intent::{DecimalWrapper, Exchange}; use namada::types::key::*; use namada::types::storage::{self, Epoch}; use namada::types::token; use namada::types::transaction::GasLimit; - use serde::Deserialize; use super::context::{WalletAddress, WalletKeypair, WalletPublicKey}; use super::utils::*; - use super::ArgMatches; + use super::{ArgGroup, ArgMatches}; use crate::config; use crate::config::TendermintMode; use crate::facade::tendermint::Timeout; @@ -1415,17 +1298,11 @@ pub mod args { const FEE_TOKEN: ArgDefaultFromCtx = arg_default_from_ctx("fee-token", DefaultFn(|| "XAN".into())); const FORCE: ArgFlag = flag("force"); + const DONT_PREFETCH_WASM: ArgFlag = flag("dont-prefetch-wasm"); const GAS_LIMIT: ArgDefault = arg_default("gas-limit", DefaultFn(|| token::Amount::from(0))); const GENESIS_PATH: Arg = arg("genesis-path"); const GENESIS_VALIDATOR: ArgOpt = arg("genesis-validator").opt(); - const INTENT_GOSSIPER_ADDR: ArgDefault = arg_default( - "intent-gossiper", - DefaultFn(|| { - let raw = "127.0.0.1:26661"; - SocketAddr::from_str(raw).unwrap() - }), - ); const LEDGER_ADDRESS_ABOUT: &str = "Address of a ledger node as \"{scheme}://{host}:{port}\". If the \ scheme is not supplied, it is assumed to be TCP."; @@ -1437,12 +1314,8 @@ pub mod args { const LEDGER_ADDRESS: Arg = arg("ledger-address"); const LOCALHOST: ArgFlag = flag("localhost"); - const MATCHMAKER_PATH: ArgOpt = arg_opt("matchmaker-path"); const MODE: ArgOpt = arg_opt("mode"); - const MULTIADDR_OPT: ArgOpt = arg_opt("address"); const NET_ADDRESS: Arg = arg("net-address"); - const NODE_OPT: ArgOpt = arg_opt("node"); - const NODE: Arg = arg("node"); const NFT_ADDRESS: Arg
= arg("nft-address"); const OWNER: ArgOpt = arg_opt("owner"); const PROPOSAL_OFFLINE: ArgFlag = flag("offline"); @@ -1453,10 +1326,10 @@ pub mod args { const PROPOSAL_ID_OPT: ArgOpt = arg_opt("proposal-id"); const PROPOSAL_VOTE: Arg = arg("vote"); const RAW_ADDRESS: Arg
= arg("address"); + const RAW_ADDRESS_OPT: ArgOpt
= RAW_ADDRESS.opt(); const RAW_PUBLIC_KEY_OPT: ArgOpt = arg_opt("public-key"); const REWARDS_CODE_PATH: ArgOpt = arg_opt("rewards-code-path"); const REWARDS_KEY: ArgOpt = arg_opt("rewards-key"); - const RPC_SOCKET_ADDR: ArgOpt = arg_opt("rpc"); const SCHEME: ArgDefault = arg_default("scheme", DefaultFn(|| SchemeType::Ed25519)); const SIGNER: ArgOpt = arg_opt("signer"); @@ -1467,12 +1340,8 @@ pub mod args { const STORAGE_KEY: Arg = arg("storage-key"); const SUB_PREFIX: ArgOpt = arg_opt("sub-prefix"); const TARGET: Arg = arg("target"); - const TO_STDOUT: ArgFlag = flag("stdout"); const TOKEN_OPT: ArgOpt = TOKEN.opt(); const TOKEN: Arg = arg("token"); - const TOPIC_OPT: ArgOpt = arg_opt("topic"); - const TOPIC: Arg = arg("topic"); - const TX_CODE_PATH: ArgOpt = arg_opt("tx-code-path"); const TX_HASH: Arg = arg("tx-hash"); const UNSAFE_DONT_ENCRYPT: ArgFlag = flag("unsafe-dont-encrypt"); const UNSAFE_SHOW_SECRET: ArgFlag = flag("unsafe-show-secret"); @@ -1527,9 +1396,7 @@ pub mod args { )) .arg(WASM_DIR.def().about( "Directory with built WASM validity predicates, \ - transactions and matchmaker files. This must not be an \ - absolute path as the directory is nested inside the \ - chain directory. This value can also be set via \ + transactions. This value can also be set via \ `ANOMA_WASM_DIR` environment variable, but the argument \ takes precedence, if specified.", )) @@ -2249,67 +2116,6 @@ pub mod args { } } - /// Helper struct for generating intents - #[derive(Debug, Clone, Deserialize)] - pub struct ExchangeDefinition { - /// The source address - pub addr: String, - /// The token to be sold - pub token_sell: String, - /// The minimum rate - pub rate_min: String, - /// The maximum amount of token to be sold - pub max_sell: String, - /// The token to be bought - pub token_buy: String, - /// The amount of token to be bought - pub min_buy: String, - /// The path to the wasm vp code - pub vp_path: Option, - } - - impl TryFrom for Exchange { - type Error = &'static str; - - fn try_from( - value: ExchangeDefinition, - ) -> Result { - let vp = if let Some(path) = value.vp_path { - if let Ok(wasm) = std::fs::read(path.clone()) { - Some(wasm) - } else { - eprintln!("File {} was not found.", path); - None - } - } else { - None - }; - - let addr = Address::decode(value.addr) - .expect("Addr should be a valid address"); - let token_buy = Address::decode(value.token_buy) - .expect("Token_buy should be a valid address"); - let token_sell = Address::decode(value.token_sell) - .expect("Token_sell should be a valid address"); - let min_buy = token::Amount::from_str(&value.min_buy) - .expect("Min_buy must be convertible to number"); - let max_sell = token::Amount::from_str(&value.max_sell) - .expect("Max_sell must be convertible to number"); - let rate_min = DecimalWrapper::from_str(&value.rate_min) - .expect("Max_sell must be convertible to decimal."); - - Ok(Exchange { - addr, - token_sell, - rate_min, - max_sell, - token_buy, - min_buy, - vp, - }) - } - } - /// Query PoS bond(s) #[derive(Clone, Debug)] pub struct QueryBonds { @@ -2428,218 +2234,6 @@ pub mod args { .arg(STORAGE_KEY.def().about("Storage key")) } } - /// Intent arguments - #[derive(Clone, Debug)] - pub struct Intent { - /// Gossip node address - pub node_addr: Option, - /// Intent topic - pub topic: Option, - /// Source address - pub source: Option, - /// Signing key - pub signing_key: Option, - /// Exchanges description - pub exchanges: Vec, - /// The address of the ledger node as host:port - pub ledger_address: TendermintAddress, - /// 
Print output to stdout - pub to_stdout: bool, - } - - impl Args for Intent { - fn parse(matches: &ArgMatches) -> Self { - let node_addr = NODE_OPT.parse(matches); - let data_path = DATA_PATH.parse(matches); - let source = SOURCE_OPT.parse(matches); - let signing_key = SIGNING_KEY_OPT.parse(matches); - let to_stdout = TO_STDOUT.parse(matches); - let topic = TOPIC_OPT.parse(matches); - - let file = File::open(&data_path).expect("File must exist."); - let exchange_definitions: Vec = - serde_json::from_reader(file) - .expect("JSON was not well-formatted"); - - let exchanges: Vec = exchange_definitions - .iter() - .map(|item| { - Exchange::try_from(item.clone()).expect( - "Conversion from ExchangeDefinition to Exchange \ - should not fail.", - ) - }) - .collect(); - let ledger_address = LEDGER_ADDRESS_DEFAULT.parse(matches); - - Self { - node_addr, - topic, - source, - signing_key, - exchanges, - ledger_address, - to_stdout, - } - } - - fn def(app: App) -> App { - app.arg( - NODE_OPT - .def() - .about("The gossip node address.") - .conflicts_with(TO_STDOUT.name), - ) - .arg(DATA_PATH.def().about( - "The data of the intent, that contains all value necessary \ - for the matchmaker.", - )) - .arg( - SOURCE_OPT - .def() - .about( - "Sign the intent with the key of a given address or \ - address alias from your wallet.", - ) - .conflicts_with(SIGNING_KEY_OPT.name), - ) - .arg( - SIGNING_KEY_OPT - .def() - .about( - "Sign the intent with the key for the given public \ - key, public key hash or alias from your wallet.", - ) - .conflicts_with(SOURCE_OPT.name), - ) - .arg(LEDGER_ADDRESS_DEFAULT.def().about(LEDGER_ADDRESS_ABOUT)) - .arg( - TOPIC_OPT - .def() - .about("The subnetwork where the intent should be sent to.") - .conflicts_with(TO_STDOUT.name), - ) - .arg( - TO_STDOUT - .def() - .about( - "Echo the serialized intent to stdout. 
Note that with \ - this option, the intent won't be submitted to the \ - intent gossiper RPC.", - ) - .conflicts_with_all(&[NODE_OPT.name, TOPIC.name]), - ) - } - } - - /// Subscribe intent topic arguments - #[derive(Clone, Debug)] - pub struct SubscribeTopic { - /// Gossip node address - pub node_addr: String, - /// Intent topic - pub topic: String, - } - - impl Args for SubscribeTopic { - fn parse(matches: &ArgMatches) -> Self { - let node_addr = NODE.parse(matches); - let topic = TOPIC.parse(matches); - Self { node_addr, topic } - } - - fn def(app: App) -> App { - app.arg(NODE.def().about("The gossip node address.")).arg( - TOPIC - .def() - .about("The new topic of interest for that node."), - ) - } - } - - #[derive(Clone, Debug)] - pub struct GossipRun { - pub addr: Option, - pub rpc: Option, - } - - impl Args for GossipRun { - fn parse(matches: &ArgMatches) -> Self { - let addr = MULTIADDR_OPT.parse(matches); - let rpc = RPC_SOCKET_ADDR.parse(matches); - Self { addr, rpc } - } - - fn def(app: App) -> App { - app.arg( - MULTIADDR_OPT - .def() - .about("Gossip service address as host:port."), - ) - .arg(RPC_SOCKET_ADDR.def().about("Enable RPC service.")) - } - } - - #[derive(Clone, Debug)] - pub struct Matchmaker { - pub matchmaker_path: Option, - pub tx_code_path: Option, - pub intent_gossiper_addr: SocketAddr, - pub ledger_addr: TendermintAddress, - pub tx_signing_key: WalletKeypair, - pub tx_source_address: WalletAddress, - } - - impl Args for Matchmaker { - fn parse(matches: &ArgMatches) -> Self { - let intent_gossiper_addr = INTENT_GOSSIPER_ADDR.parse(matches); - let matchmaker_path = MATCHMAKER_PATH.parse(matches); - let tx_code_path = TX_CODE_PATH.parse(matches); - let ledger_addr = LEDGER_ADDRESS_DEFAULT.parse(matches); - let tx_signing_key = SIGNING_KEY.parse(matches); - let tx_source_address = SOURCE.parse(matches); - Self { - intent_gossiper_addr, - matchmaker_path, - tx_code_path, - ledger_addr, - tx_signing_key, - tx_source_address, - } - } - - fn def(app: App) -> App { - app.arg(INTENT_GOSSIPER_ADDR.def().about( - "Intent Gossiper endpoint for matchmaker connections as \ - \"{host}:{port}\".", - )) - .arg(MATCHMAKER_PATH.def().about( - "The file name of the matchmaker compiled to a dynamic \ - library (the filename extension is optional).", - )) - .arg( - TX_CODE_PATH - .def() - .about("The transaction code to use with the matchmaker."), - ) - .arg(LEDGER_ADDRESS_DEFAULT.def().about( - "The address of the ledger as \"{scheme}://{host}:{port}\" \ - that the matchmaker must send transactions to. If the scheme \ - is not supplied, it is assumed to be TCP.", - )) - .arg(SIGNING_KEY.def().about( - "Sign the transactions created by the matchmaker with the key \ - for the given public key, public key hash or alias from your \ - wallet.", - )) - .arg(SOURCE.def().about( - "Source address or alias of an address of the transactions \ - created by the matchmaker. This must be matching the signing \ - key.", - )) - } - } - /// Common transaction arguments #[derive(Clone, Debug)] pub struct Tx { @@ -2903,14 +2497,16 @@ pub mod args { /// Wallet address lookup arguments #[derive(Clone, Debug)] - pub struct AddressFind { - pub alias: String, + pub struct AddressOrAliasFind { + pub alias: Option, + pub address: Option
, } - impl Args for AddressFind { + impl Args for AddressOrAliasFind { fn parse(matches: &ArgMatches) -> Self { - let alias = ALIAS.parse(matches); - Self { alias } + let alias = ALIAS_OPT.parse(matches); + let address = RAW_ADDRESS_OPT.parse(matches); + Self { alias, address } } fn def(app: App) -> App { @@ -2919,6 +2515,16 @@ pub mod args { .def() .about("An alias associated with the address."), ) + .arg( + RAW_ADDRESS_OPT + .def() + .about("The bech32m encoded address string."), + ) + .group( + ArgGroup::new("find_flags") + .args(&[ALIAS_OPT.name, RAW_ADDRESS_OPT.name]) + .required(true), + ) } } @@ -2955,6 +2561,7 @@ pub mod args { pub chain_id: ChainId, pub genesis_validator: Option, pub pre_genesis_path: Option, + pub dont_prefetch_wasm: bool, } impl Args for JoinNetwork { @@ -2962,10 +2569,12 @@ pub mod args { let chain_id = CHAIN_ID.parse(matches); let genesis_validator = GENESIS_VALIDATOR.parse(matches); let pre_genesis_path = PRE_GENESIS_PATH.parse(matches); + let dont_prefetch_wasm = DONT_PREFETCH_WASM.parse(matches); Self { chain_id, genesis_validator, pre_genesis_path, + dont_prefetch_wasm, } } @@ -2973,6 +2582,25 @@ pub mod args { app.arg(CHAIN_ID.def().about("The chain ID. The chain must be known in the https://github.com/heliaxdev/anoma-network-config repository.")) .arg(GENESIS_VALIDATOR.def().about("The alias of the genesis validator that you want to set up as, if any.")) .arg(PRE_GENESIS_PATH.def().about("The path to the pre-genesis directory for genesis validator, if any. Defaults to \"{base-dir}/pre-genesis/{genesis-validator}\".")) + .arg(DONT_PREFETCH_WASM.def().about( + "Do not pre-fetch WASM.", + )) + } + } + + #[derive(Clone, Debug)] + pub struct FetchWasms { + pub chain_id: ChainId, + } + + impl Args for FetchWasms { + fn parse(matches: &ArgMatches) -> Self { + let chain_id = CHAIN_ID.parse(matches); + Self { chain_id } + } + + fn def(app: App) -> App { + app.arg(CHAIN_ID.def().about("The chain ID. 
The chain must be known in the https://github.com/heliaxdev/anoma-network-config repository, in which case it should have pre-built wasms available for download.")) } } @@ -3039,7 +2667,7 @@ pub mod args { )) .arg(LOCALHOST.def().about( "Use localhost address for P2P and RPC connections for the \ - validators ledger and intent gossip nodes", + validators ledger", )) .arg(ALLOW_DUPLICATE_IP.def().about( "Toggle to disable guard against peers connecting from the \ @@ -3113,7 +2741,7 @@ pub fn anoma_cli() -> (cmds::Anoma, String) { safe_exit(2); } -pub fn anoma_node_cli() -> (cmds::AnomaNode, Context) { +pub fn anoma_node_cli() -> Result<(cmds::AnomaNode, Context)> { let app = anoma_node_app(); cmds::AnomaNode::parse_or_print_help(app) } @@ -3123,7 +2751,7 @@ pub enum AnomaClient { WithContext(Box<(cmds::AnomaClientWithContext, Context)>), } -pub fn anoma_client_cli() -> AnomaClient { +pub fn anoma_client_cli() -> Result { let app = anoma_client_app(); let mut app = cmds::AnomaClient::add_sub(app); let matches = app.clone().get_matches(); @@ -3132,11 +2760,11 @@ pub fn anoma_client_cli() -> AnomaClient { let global_args = args::Global::parse(&matches); match cmd { cmds::AnomaClient::WithContext(sub_cmd) => { - let context = Context::new(global_args); - AnomaClient::WithContext(Box::new((sub_cmd, context))) + let context = Context::new(global_args)?; + Ok(AnomaClient::WithContext(Box::new((sub_cmd, context)))) } cmds::AnomaClient::WithoutContext(sub_cmd) => { - AnomaClient::WithoutContext(sub_cmd, global_args) + Ok(AnomaClient::WithoutContext(sub_cmd, global_args)) } } } @@ -3147,7 +2775,7 @@ pub fn anoma_client_cli() -> AnomaClient { } } -pub fn anoma_wallet_cli() -> (cmds::AnomaWallet, Context) { +pub fn anoma_wallet_cli() -> Result<(cmds::AnomaWallet, Context)> { let app = anoma_wallet_app(); cmds::AnomaWallet::parse_or_print_help(app) } @@ -3155,7 +2783,6 @@ pub fn anoma_wallet_cli() -> (cmds::AnomaWallet, Context) { fn anoma_app() -> App { let app = App::new(APP_NAME) .version(anoma_version()) - .author(crate_authors!("\n")) .about("Anoma command line interface.") .setting(AppSettings::SubcommandRequiredElseHelp); cmds::Anoma::add_sub(args::Global::def(app)) @@ -3164,7 +2791,6 @@ fn anoma_app() -> App { fn anoma_node_app() -> App { let app = App::new(APP_NAME) .version(anoma_version()) - .author(crate_authors!("\n")) .about("Anoma node command line interface.") .setting(AppSettings::SubcommandRequiredElseHelp); cmds::AnomaNode::add_sub(args::Global::def(app)) @@ -3173,7 +2799,6 @@ fn anoma_node_app() -> App { fn anoma_client_app() -> App { let app = App::new(APP_NAME) .version(anoma_version()) - .author(crate_authors!("\n")) .about("Anoma client command line interface.") .setting(AppSettings::SubcommandRequiredElseHelp); cmds::AnomaClient::add_sub(args::Global::def(app)) @@ -3182,7 +2807,6 @@ fn anoma_client_app() -> App { fn anoma_wallet_app() -> App { let app = App::new(APP_NAME) .version(anoma_version()) - .author(crate_authors!("\n")) .about("Anoma wallet command line interface.") .setting(AppSettings::SubcommandRequiredElseHelp); cmds::AnomaWallet::add_sub(args::Global::def(app)) diff --git a/apps/src/lib/cli/context.rs b/apps/src/lib/cli/context.rs index 8189b633bf..fc6db9633b 100644 --- a/apps/src/lib/cli/context.rs +++ b/apps/src/lib/cli/context.rs @@ -6,6 +6,7 @@ use std::path::{Path, PathBuf}; use std::rc::Rc; use std::str::FromStr; +use color_eyre::eyre::Result; use namada::types::address::Address; use namada::types::chain::ChainId; use namada::types::key::*; @@ -44,12 +45,12 
@@ pub struct Context { pub wallet: Wallet, /// The global configuration pub global_config: GlobalConfig, - /// The ledger & intent gossip configuration for a specific chain ID + /// The ledger configuration for a specific chain ID pub config: Config, } impl Context { - pub fn new(global_args: args::Global) -> Self { + pub fn new(global_args: args::Global) -> Result { let global_config = read_or_try_new_global_config(&global_args); tracing::info!("Chain ID: {}", global_config.default_chain_id); @@ -65,43 +66,29 @@ impl Context { let genesis_file_path = global_args .base_dir .join(format!("{}.toml", global_config.default_chain_id.as_str())); - let wallet = Wallet::load_or_new_from_genesis(&chain_dir, move || { - genesis_config::open_genesis_config(genesis_file_path) - }); + let wallet = Wallet::load_or_new_from_genesis( + &chain_dir, + genesis_config::open_genesis_config(&genesis_file_path)?, + ); // If the WASM dir specified, put it in the config match global_args.wasm_dir.as_ref() { Some(wasm_dir) => { - if wasm_dir.is_absolute() { - eprintln!( - "The arg `--wasm-dir` cannot be an absolute path. It \ - is nested inside the chain directory." - ); - safe_exit(1); - } config.wasm_dir = wasm_dir.clone(); } None => { if let Ok(wasm_dir) = env::var(ENV_VAR_WASM_DIR) { let wasm_dir: PathBuf = wasm_dir.into(); - if wasm_dir.is_absolute() { - eprintln!( - "The env var `{}` cannot be an absolute path. It \ - is nested inside the chain directory.", - ENV_VAR_WASM_DIR - ); - safe_exit(1); - } config.wasm_dir = wasm_dir; } } } - Self { + Ok(Self { global_args, wallet, global_config, config, - } + }) } /// Parse and/or look-up the value from the context. @@ -166,7 +153,7 @@ impl Context { /// Read the given WASM file from the WASM directory or an absolute path. 
pub fn read_wasm(&self, file_name: impl AsRef) -> Vec { - wasm_loader::read_wasm(self.wasm_dir(), file_name) + wasm_loader::read_wasm_or_exit(self.wasm_dir(), file_name) } } diff --git a/apps/src/lib/cli/utils.rs b/apps/src/lib/cli/utils.rs index f47ec42696..56965d72ef 100644 --- a/apps/src/lib/cli/utils.rs +++ b/apps/src/lib/cli/utils.rs @@ -4,6 +4,7 @@ use std::marker::PhantomData; use std::str::FromStr; use clap::ArgMatches; +use color_eyre::eyre::Result; use super::args; use super::context::{Context, FromContext}; @@ -16,14 +17,14 @@ pub trait Cmd: Sized { fn add_sub(app: App) -> App; fn parse(matches: &ArgMatches) -> Option; - fn parse_or_print_help(app: App) -> (Self, Context) { + fn parse_or_print_help(app: App) -> Result<(Self, Context)> { let mut app = Self::add_sub(app); let matches = app.clone().get_matches(); match Self::parse(&matches) { Some(cmd) => { let global_args = args::Global::parse(&matches); - let context = Context::new(global_args); - (cmd, context) + let context = Context::new(global_args)?; + Ok((cmd, context)) } None => { app.print_help().unwrap(); diff --git a/apps/src/lib/client/gossip.rs b/apps/src/lib/client/gossip.rs deleted file mode 100644 index 80444d942c..0000000000 --- a/apps/src/lib/client/gossip.rs +++ /dev/null @@ -1,116 +0,0 @@ -use std::collections::HashSet; -use std::io::Write; - -use borsh::BorshSerialize; -use namada::proto::Signed; -use namada::types::intent::{Exchange, FungibleTokenIntent}; - -use super::signing; -use crate::cli::{self, args, Context}; -use crate::facade::tendermint_config::net::Address as TendermintAddress; -use crate::proto::services::rpc_service_client::RpcServiceClient; -use crate::proto::{services, RpcMessage}; -use crate::wallet::Wallet; - -/// Create an intent, sign it and submit it to the gossip node (unless -/// `to_stdout` is `true`). 
-pub async fn gossip_intent( - mut ctx: Context, - args::Intent { - node_addr, - topic, - source, - signing_key, - exchanges, - ledger_address, - to_stdout, - }: args::Intent, -) { - let mut signed_exchanges: HashSet> = - HashSet::with_capacity(exchanges.len()); - for exchange in exchanges { - let signed = - sign_exchange(&mut ctx.wallet, exchange, ledger_address.clone()) - .await; - signed_exchanges.insert(signed); - } - - let source_keypair = match ctx.get_opt_cached(&signing_key) { - Some(key) => key, - None => { - let source = ctx.get_opt(&source).unwrap_or_else(|| { - eprintln!("A source or a signing key is required."); - cli::safe_exit(1) - }); - signing::find_keypair( - &mut ctx.wallet, - &source, - ledger_address.clone(), - ) - .await - } - }; - let signed_ft: Signed = Signed::new( - &*source_keypair, - FungibleTokenIntent { - exchange: signed_exchanges, - }, - ); - let data_bytes = signed_ft.try_to_vec().unwrap(); - - if to_stdout { - let mut out = std::io::stdout(); - out.write_all(&data_bytes).unwrap(); - out.flush().unwrap(); - } else { - let node_addr = node_addr.expect( - "Gossip node address must be defined to submit the intent to it.", - ); - let topic = topic.expect( - "The topic must be defined to submit the intent to a gossip node.", - ); - - match RpcServiceClient::connect(node_addr.clone()).await { - Ok(mut client) => { - let intent = namada::proto::Intent::new(data_bytes); - let message: services::RpcMessage = - RpcMessage::new_intent(intent, topic).into(); - let response = client.send_message(message).await.expect( - "Failed to send message and/or receive rpc response", - ); - println!("{:#?}", response); - } - Err(e) => { - eprintln!( - "Error connecting RPC client to {}: {}", - node_addr, e - ); - } - }; - } -} - -/// Request an intent gossip node with a matchmaker to subscribe to a given -/// topic. 
-pub async fn subscribe_topic( - _ctx: Context, - args::SubscribeTopic { node_addr, topic }: args::SubscribeTopic, -) { - let mut client = RpcServiceClient::connect(node_addr).await.unwrap(); - let message: services::RpcMessage = RpcMessage::new_topic(topic).into(); - let response = client - .send_message(message) - .await - .expect("failed to send message and/or receive rpc response"); - println!("{:#?}", response); -} - -async fn sign_exchange( - wallet: &mut Wallet, - exchange: Exchange, - ledger_address: TendermintAddress, -) -> Signed { - let source_keypair = - signing::find_keypair(wallet, &exchange.addr, ledger_address).await; - Signed::new(&*source_keypair, exchange.clone()) -} diff --git a/apps/src/lib/client/mod.rs b/apps/src/lib/client/mod.rs index 18b32889b5..9807ca6a30 100644 --- a/apps/src/lib/client/mod.rs +++ b/apps/src/lib/client/mod.rs @@ -1,4 +1,3 @@ -pub mod gossip; pub mod rpc; pub mod signing; pub mod tendermint_rpc_types; diff --git a/apps/src/lib/client/rpc.rs b/apps/src/lib/client/rpc.rs index b70c9b3483..d191fada0f 100644 --- a/apps/src/lib/client/rpc.rs +++ b/apps/src/lib/client/rpc.rs @@ -11,7 +11,9 @@ use async_std::fs::{self}; use async_std::path::PathBuf; use async_std::prelude::*; use borsh::BorshDeserialize; +use data_encoding::HEXLOWER; use itertools::Itertools; +use namada::ledger::governance::parameters::GovParams; use namada::ledger::governance::storage as gov_storage; use namada::ledger::governance::utils::Votes; use namada::ledger::parameters::{storage as param_storage, EpochDuration}; @@ -21,10 +23,10 @@ use namada::ledger::pos::types::{ use namada::ledger::pos::{ self, is_validator_slashes_key, BondId, Bonds, PosParams, Slash, Unbonds, }; -use namada::ledger::treasury::storage as treasury_storage; use namada::types::address::Address; use namada::types::governance::{ - OfflineProposal, OfflineVote, ProposalVote, TallyResult, + OfflineProposal, OfflineVote, ProposalResult, ProposalVote, TallyResult, + VotePower, }; use namada::types::key::*; use namada::types::storage::{Epoch, Key, KeySeg, PrefixValue}; @@ -157,7 +159,7 @@ pub async fn query_raw_bytes(_ctx: Context, args: args::QueryRawBytes) { .unwrap(); match response.code { Code::Ok => { - println!("{}", hex::encode(&response.value)); + println!("{}", HEXLOWER.encode(&response.value)); } Code::Err(err) => { eprintln!( @@ -215,7 +217,7 @@ pub async fn query_balance(ctx: Context, args: args::QueryBalance) { ) .await; if let Some(balances) = balances { - print_balances(balances, &token, Some(&owner)); + print_balances(&ctx, balances, &token, Some(&owner)); } } } @@ -225,7 +227,7 @@ pub async fn query_balance(ctx: Context, args: args::QueryBalance) { let balances = query_storage_prefix::(client, prefix).await; if let Some(balances) = balances { - print_balances(balances, &token, None); + print_balances(&ctx, balances, &token, None); } } (None, None) => { @@ -235,7 +237,7 @@ pub async fn query_balance(ctx: Context, args: args::QueryBalance) { query_storage_prefix::(client.clone(), key) .await; if let Some(balances) = balances { - print_balances(balances, &token, None); + print_balances(&ctx, balances, &token, None); } } } @@ -243,6 +245,7 @@ pub async fn query_balance(ctx: Context, args: args::QueryBalance) { } fn print_balances( + ctx: &Context, balances: impl Iterator, token: &Address, target: Option<&Address>, @@ -265,13 +268,19 @@ fn print_balances( owner.clone(), format!( "with {}: {}, owned by {}", - sub_prefix, balance, owner + sub_prefix, + balance, + lookup_alias(ctx, owner) ), )), None => 
token::is_any_token_balance_key(&key).map(|owner| { ( owner.clone(), - format!(": {}, owned by {}", balance, owner), + format!( + ": {}, owned by {}", + balance, + lookup_alias(ctx, owner) + ), ) }), }, @@ -288,7 +297,10 @@ fn print_balances( if print_num == 0 { match target { - Some(t) => writeln!(w, "No balances owned by {}", t).unwrap(), + Some(t) => { + writeln!(w, "No balances owned by {}", lookup_alias(ctx, t)) + .unwrap() + } None => { writeln!(w, "No balances for token {}", currency_code).unwrap() } @@ -414,20 +426,17 @@ pub async fn query_proposal_result( match args.proposal_id { Some(id) => { - let start_epoch_key = gov_storage::get_voting_start_epoch_key(id); let end_epoch_key = gov_storage::get_voting_end_epoch_key(id); - let start_epoch = - query_storage_value::(&client, &start_epoch_key).await; let end_epoch = query_storage_value::(&client, &end_epoch_key).await; - match (start_epoch, end_epoch) { - (Some(start_epoch), Some(end_epoch)) => { + match end_epoch { + Some(end_epoch) => { if current_epoch > end_epoch { let votes = - get_proposal_votes(&client, start_epoch, id).await; + get_proposal_votes(&client, end_epoch, id).await; let proposal_result = - compute_tally(&client, start_epoch, votes).await; + compute_tally(&client, end_epoch, votes).await; println!("Proposal: {}", id); println!("{:4}Result: {}", "", proposal_result); } else { @@ -435,7 +444,7 @@ pub async fn query_proposal_result( cli::safe_exit(1) } } - _ => { + None => { eprintln!("Error while retriving proposal."); cli::safe_exit(1) } @@ -459,7 +468,14 @@ pub async fn query_proposal_result( if entry.file_name().eq(&"proposal") { is_proposal_present = true - } else { + } else if entry + .file_name() + .to_string_lossy() + .starts_with("proposal-vote-") + { + // Folder may contain other + // files than just the proposal + // and the votes files.insert(entry.path()); } } @@ -481,8 +497,8 @@ pub async fn query_proposal_result( if !is_proposal_present { eprintln!( - "The folder must contain a the offline \ - proposal in a file named proposal" + "The folder must contain the offline proposal \ + in a file named \"proposal\"" ); cli::safe_exit(1) } @@ -526,7 +542,10 @@ pub async fn query_proposal_result( } }; } else { - eprintln!("Either id or offline should be used as arguments."); + eprintln!( + "Either --proposal-id or --data-path should be provided \ + as arguments." + ); cli::safe_exit(1) } } @@ -539,45 +558,8 @@ pub async fn query_protocol_parameters( ) { let client = HttpClient::new(args.query.ledger_address).unwrap(); - println!("Goveranance parameters"); - let key = gov_storage::get_max_proposal_code_size_key(); - let max_proposal_code_size = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!( - "{:4}Max. proposal code size: {}", - "", max_proposal_code_size - ); - - let key = gov_storage::get_max_proposal_content_key(); - let max_proposal_content = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!( - "{:4}Max. proposal content size: {}", - "", max_proposal_content - ); - - let key = gov_storage::get_min_proposal_fund_key(); - let min_proposal_fund = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!("{:4}Min. proposal funds: {}", "", min_proposal_fund); - - let key = gov_storage::get_min_proposal_grace_epoch_key(); - let min_proposal_grace_epoch = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!( - "{:4}Min. 
proposal grace epoch: {}", - "", min_proposal_grace_epoch - ); - - let key = gov_storage::get_min_proposal_period_key(); - let min_proposal_period = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!("{:4}Min. proposal period: {}", "", min_proposal_period); + let gov_parameters = get_governance_parameters(&client).await; + println!("Governance Parameters\n {:4}", gov_parameters); println!("Protocol parameters"); let key = param_storage::get_epoch_storage_key(); @@ -611,16 +593,6 @@ pub async fn query_protocol_parameters( .expect("Parameter should be definied."); println!("{:4}Transactions whitelist: {:?}", "", tx_whitelist); - println!("Treasury parameters"); - let key = treasury_storage::get_max_transferable_fund_key(); - let max_transferable_amount = query_storage_value::(&client, &key) - .await - .expect("Parameter should be definied."); - println!( - "{:4}Max. transferable amount: {}", - "", max_transferable_amount - ); - println!("PoS parameters"); let key = pos::params_key(); let pos_params = query_storage_value::(&client, &key) @@ -1306,7 +1278,7 @@ fn process_bonds_query( let mut total_active = total_active.unwrap_or_else(|| 0.into()); let mut current_total: token::Amount = 0.into(); for bond in bonds.iter() { - for (epoch_start, &(mut delta)) in bond.deltas.iter().sorted() { + for (epoch_start, &(mut delta)) in bond.pos_deltas.iter().sorted() { writeln!(w, " Active from epoch {}: Δ {}", epoch_start, delta) .unwrap(); delta = apply_slashes(slashes, delta, *epoch_start, None, Some(w)); @@ -1671,9 +1643,11 @@ pub async fn get_proposal_votes( query_storage_prefix::(client.clone(), vote_prefix_key) .await; - let mut yay_validators: HashMap = HashMap::new(); - let mut yay_delegators: HashMap = HashMap::new(); - let mut nay_delegators: HashMap = HashMap::new(); + let mut yay_validators: HashMap = HashMap::new(); + let mut yay_delegators: HashMap> = + HashMap::new(); + let mut nay_delegators: HashMap> = + HashMap::new(); if let Some(vote_iter) = vote_iter { for (key, vote) in vote_iter { @@ -1700,9 +1674,15 @@ pub async fn get_proposal_votes( .await; if let Some(amount) = delegator_token_amount { if vote.is_yay() { - yay_delegators.insert(voter_address, amount); + let entry = + yay_delegators.entry(voter_address).or_default(); + entry + .insert(validator_address, VotePower::from(amount)); } else { - nay_delegators.insert(voter_address, amount); + let entry = + nay_delegators.entry(voter_address).or_default(); + entry + .insert(validator_address, VotePower::from(amount)); } } } @@ -1725,9 +1705,11 @@ pub async fn get_proposal_offline_votes( let proposal_hash = proposal.compute_hash(); - let mut yay_validators: HashMap = HashMap::new(); - let mut yay_delegators: HashMap = HashMap::new(); - let mut nay_delegators: HashMap = HashMap::new(); + let mut yay_validators: HashMap = HashMap::new(); + let mut yay_delegators: HashMap> = + HashMap::new(); + let mut nay_delegators: HashMap> = + HashMap::new(); for path in files { let file = File::open(&path).expect("Proposal file must exist."); @@ -1766,25 +1748,72 @@ pub async fn get_proposal_offline_votes( let bonds_iter = query_storage_prefix::(client.clone(), key).await; if let Some(bonds) = bonds_iter { - for (key, epoched_amount) in bonds { - let bond = epoched_amount - .get(proposal.tally_epoch) - .expect("Delegation bond should be definied."); + for (key, epoched_bonds) in bonds { + // Look-up slashes for the validator in this key and + // apply them if any + let validator = 
pos::get_validator_address_from_bond(&key) + .expect( + "Delegation key should contain validator address.", + ); + let slashes_key = pos::validator_slashes_key(&validator); + let slashes = query_storage_value::( + client, + &slashes_key, + ) + .await + .unwrap_or_default(); + let mut delegated_amount: token::Amount = 0.into(); let epoch = namada::ledger::pos::types::Epoch::from( proposal.tally_epoch.0, ); - let amount = *bond - .deltas - .get(&epoch) - .expect("Delegation amount should be definied."); + let bond = epoched_bonds + .get(epoch) + .expect("Delegation bond should be defined."); + let mut to_deduct = bond.neg_deltas; + for (start_epoch, &(mut delta)) in + bond.pos_deltas.iter().sorted() + { + // deduct bond's neg_deltas + if to_deduct > delta { + to_deduct -= delta; + // If the whole bond was deducted, continue to + // the next one + continue; + } else { + delta -= to_deduct; + to_deduct = token::Amount::default(); + } + + delta = apply_slashes( + &slashes, + delta, + *start_epoch, + None, + None, + ); + delegated_amount += delta; + } + let validator_address = pos::get_validator_address_from_bond(&key).expect( "Delegation key should contain validator address.", ); if proposal_vote.vote.is_yay() { - yay_delegators.insert(validator_address, amount); + let entry = yay_delegators + .entry(proposal_vote.address.clone()) + .or_default(); + entry.insert( + validator_address, + VotePower::from(delegated_amount), + ); } else { - nay_delegators.insert(validator_address, amount); + let entry = nay_delegators + .entry(proposal_vote.address.clone()) + .or_default(); + entry.insert( + validator_address, + VotePower::from(delegated_amount), + ); } } } @@ -1803,7 +1832,7 @@ pub async fn compute_tally( client: &HttpClient, epoch: Epoch, votes: Votes, -) -> TallyResult { +) -> ProposalResult { let validators = get_all_validators(client, epoch).await; let total_stacked_tokens = get_total_staked_tokes(client, epoch, &validators).await; @@ -1814,29 +1843,43 @@ pub async fn compute_tally( nay_delegators, } = votes; - let mut total_yay_stacked_tokens = Amount::from(0); + let mut total_yay_stacked_tokens = VotePower::from(0_u64); for (_, amount) in yay_validators.clone().into_iter() { total_yay_stacked_tokens += amount; } // YAY: Add delegator amount whose validator didn't vote / voted nay - for (validator_address, amount) in yay_delegators.into_iter() { - if !yay_validators.contains_key(&validator_address) { - total_yay_stacked_tokens += amount; + for (_, vote_map) in yay_delegators.iter() { + for (validator_address, vote_power) in vote_map.iter() { + if !yay_validators.contains_key(validator_address) { + total_yay_stacked_tokens += vote_power; + } } } // NAY: Remove delegator amount whose validator validator vote yay - for (validator_address, amount) in nay_delegators.into_iter() { - if yay_validators.contains_key(&validator_address) { - total_yay_stacked_tokens -= amount; + for (_, vote_map) in nay_delegators.iter() { + for (validator_address, vote_power) in vote_map.iter() { + if yay_validators.contains_key(validator_address) { + total_yay_stacked_tokens -= vote_power; + } } } - if 3 * total_yay_stacked_tokens >= 2 * total_stacked_tokens { - TallyResult::Passed + if total_yay_stacked_tokens >= (total_stacked_tokens / 3) * 2 { + ProposalResult { + result: TallyResult::Passed, + total_voting_power: total_stacked_tokens, + total_yay_power: total_yay_stacked_tokens, + total_nay_power: 0, + } } else { - TallyResult::Rejected + ProposalResult { + result: TallyResult::Rejected, + total_voting_power: 
total_stacked_tokens, + total_yay_power: total_yay_stacked_tokens, + total_nay_power: 0, + } } } @@ -1859,7 +1902,21 @@ pub async fn get_bond_amount_at( Some(epoched_bonds) => { let mut delegated_amount: token::Amount = 0.into(); for bond in epoched_bonds.iter() { - for (epoch_start, &(mut delta)) in bond.deltas.iter().sorted() { + let mut to_deduct = bond.neg_deltas; + for (epoch_start, &(mut delta)) in + bond.pos_deltas.iter().sorted() + { + // deduct bond's neg_deltas + if to_deduct > delta { + to_deduct -= delta; + // If the whole bond was deducted, continue to + // the next one + continue; + } else { + delta -= to_deduct; + to_deduct = token::Amount::default(); + } + delta = apply_slashes( &slashes, delta, @@ -1901,8 +1958,8 @@ pub async fn get_total_staked_tokes( client: &HttpClient, epoch: Epoch, validators: &[Address], -) -> token::Amount { - let mut total = Amount::from(0); +) -> VotePower { + let mut total = VotePower::from(0_u64); for validator in validators { total += get_validator_stake(client, epoch, validator).await; @@ -1914,7 +1971,7 @@ async fn get_validator_stake( client: &HttpClient, epoch: Epoch, validator: &Address, -) -> token::Amount { +) -> VotePower { let total_voting_power_key = pos::validator_total_deltas_key(validator); let total_voting_power = query_storage_value::( client, @@ -1923,11 +1980,9 @@ async fn get_validator_stake( .await .expect("Total deltas should be defined"); let epoched_total_voting_power = total_voting_power.get(epoch); - if let Some(epoched_total_voting_power) = epoched_total_voting_power { - token::Amount::from_change(epoched_total_voting_power) - } else { - token::Amount::from(0) - } + + VotePower::try_from(epoched_total_voting_power.unwrap_or_default()) + .unwrap_or_default() } pub async fn get_delegators_delegation( @@ -1949,3 +2004,53 @@ pub async fn get_delegators_delegation( } delegation_addresses } + +pub async fn get_governance_parameters(client: &HttpClient) -> GovParams { + let key = gov_storage::get_max_proposal_code_size_key(); + let max_proposal_code_size = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + let key = gov_storage::get_max_proposal_content_key(); + let max_proposal_content_size = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + let key = gov_storage::get_min_proposal_fund_key(); + let min_proposal_fund = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + let key = gov_storage::get_min_proposal_grace_epoch_key(); + let min_proposal_grace_epochs = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + let key = gov_storage::get_min_proposal_period_key(); + let min_proposal_period = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + let key = gov_storage::get_max_proposal_period_key(); + let max_proposal_period = query_storage_value::(client, &key) + .await + .expect("Parameter should be definied."); + + GovParams { + min_proposal_fund: u64::from(min_proposal_fund), + max_proposal_code_size, + min_proposal_period, + max_proposal_period, + max_proposal_content_size, + min_proposal_grace_epochs, + } +} + +/// Try to find an alias for a given address from the wallet. If not found, +/// formats the address into a string. 
+fn lookup_alias(ctx: &Context, addr: &Address) -> String { + match ctx.wallet.find_alias(addr) { + Some(alias) => format!("{}", alias), + None => format!("{}", addr), + } +} diff --git a/apps/src/lib/client/tx.rs b/apps/src/lib/client/tx.rs index 49d90a586d..7e94ac8e8f 100644 --- a/apps/src/lib/client/tx.rs +++ b/apps/src/lib/client/tx.rs @@ -2,7 +2,8 @@ use std::borrow::Cow; use std::env; use std::fs::File; -use async_std::io::{self, WriteExt}; +use async_std::io::prelude::WriteExt; +use async_std::io::{self}; use borsh::BorshSerialize; use itertools::Either::*; use namada::ledger::governance::storage as gov_storage; @@ -15,7 +16,6 @@ use namada::types::governance::{ use namada::types::key::{self, *}; use namada::types::nft::{self, Nft, NftToken}; use namada::types::storage::{Epoch, Key}; -use namada::types::token::Amount; use namada::types::transaction::governance::{ InitProposalData, VoteProposalData, }; @@ -314,6 +314,8 @@ pub async fn submit_init_validator( } let tx_code = ctx.read_wasm(TX_INIT_VALIDATOR_WASM); + let eth_hot_key_public = + key::secp256k1::PublicKey::try_from_pk(ð_hot_key.ref_to()).unwrap(); let data = InitValidator { account_key, consensus_key: consensus_key.ref_to(), @@ -321,10 +323,7 @@ pub async fn submit_init_validator( ð_cold_key.ref_to(), ) .unwrap(), - eth_hot_key: key::secp256k1::PublicKey::try_from_pk( - ð_hot_key.ref_to(), - ) - .unwrap(), + eth_hot_key: eth_hot_key_public.clone(), rewards_account_key, protocol_key, dkg_key, @@ -332,6 +331,11 @@ pub async fn submit_init_validator( rewards_vp_code, }; let data = data.try_to_vec().expect("Encoding tx data shouldn't fail"); + eprintln!("ETH HOT KEY: {:#?}", eth_hot_key_public); + eprintln!( + "ETH HOT KEY BORSH: {:#?}", + eth_hot_key_public.try_to_vec().unwrap() + ); let tx = Tx::new(tx_code, Some(data)); let (mut ctx, initialized_accounts) = process_tx(ctx, &tx_args, tx, Some(&source)).await; @@ -410,15 +414,11 @@ pub async fn submit_init_validator( }; // add validator address and keys to the wallet ctx.wallet - .add_validator_data(validator_address.clone(), validator_keys); + .add_validator_data(validator_address, validator_keys); ctx.wallet.save().unwrap_or_else(|err| eprintln!("{}", err)); let tendermint_home = ctx.config.ledger.tendermint_dir(); - tendermint_node::write_validator_key( - &tendermint_home, - &validator_address, - &consensus_key, - ); + tendermint_node::write_validator_key(&tendermint_home, &consensus_key); tendermint_node::write_validator_state(tendermint_home); println!(); @@ -599,7 +599,68 @@ pub async fn submit_init_proposal(mut ctx: Context, args: args::InitProposal) { let proposal: Proposal = serde_json::from_reader(file).expect("JSON was not well-formatted"); + let client = HttpClient::new(args.tx.ledger_address.clone()).unwrap(); + let signer = WalletAddress::new(proposal.clone().author.to_string()); + let governance_parameters = rpc::get_governance_parameters(&client).await; + let current_epoch = rpc::query_epoch(args::Query { + ledger_address: args.tx.ledger_address.clone(), + }) + .await; + + if proposal.voting_start_epoch <= current_epoch + || proposal.voting_start_epoch.0 + % governance_parameters.min_proposal_period + != 0 + { + println!("{}", proposal.voting_start_epoch <= current_epoch); + println!( + "{}", + proposal.voting_start_epoch.0 + % governance_parameters.min_proposal_period + == 0 + ); + eprintln!( + "Invalid proposal start epoch: {} must be greater than current \ + epoch {} and a multiple of {}", + proposal.voting_start_epoch, + current_epoch, + 
governance_parameters.min_proposal_period + ); + if !args.tx.force { + safe_exit(1) + } + } else if proposal.voting_end_epoch <= proposal.voting_start_epoch + || proposal.voting_end_epoch.0 - proposal.voting_start_epoch.0 + < governance_parameters.min_proposal_period + || proposal.voting_end_epoch.0 - proposal.voting_start_epoch.0 + > governance_parameters.max_proposal_period + || proposal.voting_end_epoch.0 % 3 != 0 + { + eprintln!( + "Invalid proposal end epoch: difference between proposal start \ + and end epoch must be at least {} and at max {} and end epoch \ + must be a multiple of {}", + governance_parameters.min_proposal_period, + governance_parameters.max_proposal_period, + governance_parameters.min_proposal_period + ); + if !args.tx.force { + safe_exit(1) + } + } else if proposal.grace_epoch <= proposal.voting_end_epoch + || proposal.grace_epoch.0 - proposal.voting_end_epoch.0 + < governance_parameters.min_proposal_grace_epochs + { + eprintln!( + "Invalid proposal grace epoch: difference between proposal grace \ + and end epoch must be at least {}", + governance_parameters.min_proposal_grace_epochs + ); + if !args.tx.force { + safe_exit(1) + } + } if args.offline { let signer = ctx.get(&signer); @@ -611,11 +672,18 @@ pub async fn submit_init_proposal(mut ctx: Context, args: args::InitProposal) { .await; let offline_proposal = OfflineProposal::new(proposal, signer, &signing_key); - let proposal_filename = "proposal".to_string(); + let proposal_filename = args + .proposal_data + .parent() + .expect("No parent found") + .join("proposal"); let out = File::create(&proposal_filename).unwrap(); match serde_json::to_writer_pretty(out, &offline_proposal) { Ok(_) => { - println!("Proposal created: {}.", proposal_filename); + println!( + "Proposal created: {}.", + proposal_filename.to_string_lossy() + ); } Err(e) => { eprintln!("Error while creating proposal file: {}.", e); @@ -623,8 +691,6 @@ pub async fn submit_init_proposal(mut ctx: Context, args: args::InitProposal) { } } } else { - let client = HttpClient::new(args.tx.ledger_address.clone()).unwrap(); - let tx_data: Result = proposal.clone().try_into(); let init_proposal_data = if let Ok(data) = tx_data { data @@ -633,35 +699,23 @@ pub async fn submit_init_proposal(mut ctx: Context, args: args::InitProposal) { safe_exit(1) }; - let min_proposal_funds_key = gov_storage::get_min_proposal_fund_key(); - let min_proposal_funds: Amount = - rpc::query_storage_value(&client, &min_proposal_funds_key) - .await - .unwrap(); let balance = rpc::get_token_balance(&client, &m1t(), &proposal.author) .await .unwrap_or_default(); - if balance < min_proposal_funds { + if balance + < token::Amount::from(governance_parameters.min_proposal_fund) + { eprintln!( "Address {} doesn't have enough funds.", &proposal.author ); safe_exit(1); } - let min_proposal_funds_key = gov_storage::get_min_proposal_fund_key(); - let min_proposal_funds: Amount = - rpc::query_storage_value(&client, &min_proposal_funds_key) - .await - .unwrap(); - let balance = rpc::get_token_balance(&client, &m1t(), &proposal.author) - .await - .unwrap_or_default(); - if balance < min_proposal_funds { - eprintln!( - "Address {} doesn't have enough funds.", - &proposal.author - ); + if init_proposal_data.content.len() + > governance_parameters.max_proposal_content_size as usize + { + eprintln!("Proposal content size too big.",); safe_exit(1); } @@ -715,12 +769,17 @@ pub async fn submit_vote_proposal(mut ctx: Context, args: args::VoteProposal) { &signing_key, ); - let proposal_vote_filename = - 
format!("proposal-vote-{}", &signer.to_string()); + let proposal_vote_filename = proposal_file_path + .parent() + .expect("No parent found") + .join(format!("proposal-vote-{}", &signer.to_string())); let out = File::create(&proposal_vote_filename).unwrap(); match serde_json::to_writer_pretty(out, &offline_vote) { Ok(_) => { - println!("Proposal vote created: {}.", proposal_vote_filename); + println!( + "Proposal vote created: {}.", + proposal_vote_filename.to_string_lossy() + ); } Err(e) => { eprintln!("Error while creating proposal vote file: {}.", e); @@ -729,6 +788,10 @@ pub async fn submit_vote_proposal(mut ctx: Context, args: args::VoteProposal) { } } else { let client = HttpClient::new(args.tx.ledger_address.clone()).unwrap(); + let current_epoch = rpc::query_epoch(args::Query { + ledger_address: args.tx.ledger_address.clone(), + }) + .await; let voter_address = ctx.get(signer); let proposal_id = args.proposal_id.unwrap(); @@ -742,6 +805,17 @@ pub async fn submit_vote_proposal(mut ctx: Context, args: args::VoteProposal) { match proposal_start_epoch { Some(epoch) => { + if current_epoch < epoch { + eprintln!( + "Current epoch {} is not greater than proposal start \ + epoch {}", + current_epoch, epoch + ); + + if !args.tx.force { + safe_exit(1) + } + } let mut delegation_addresses = rpc::get_delegators_delegation( &client, &voter_address, @@ -773,6 +847,8 @@ pub async fn submit_vote_proposal(mut ctx: Context, args: args::VoteProposal) { .await; } + println!("{:?}", delegation_addresses); + let tx_data = VoteProposalData { id: proposal_id, vote: args.vote, @@ -789,7 +865,13 @@ pub async fn submit_vote_proposal(mut ctx: Context, args: args::VoteProposal) { process_tx(ctx, &args.tx, tx, Some(signer)).await; } None => { - eprintln!("Proposal start epoch is not in the storage.") + eprintln!( + "Proposal start epoch for proposal id {} is not definied.", + proposal_id + ); + if !args.tx.force { + safe_exit(1) + } } } } @@ -957,7 +1039,7 @@ pub async fn submit_unbond(ctx: Context, args: args::Unbond) { Some(bonds) => { let mut bond_amount: token::Amount = 0.into(); for bond in bonds.iter() { - for delta in bond.deltas.values() { + for delta in bond.pos_deltas.values() { bond_amount += *delta; } } diff --git a/apps/src/lib/client/utils.rs b/apps/src/lib/client/utils.rs index ba2336414a..de2cd18c0b 100644 --- a/apps/src/lib/client/utils.rs +++ b/apps/src/lib/client/utils.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::env; use std::fs::{self, File, OpenOptions}; use std::io::Write; @@ -25,12 +25,9 @@ use crate::config::genesis::genesis_config::{ self, HexString, ValidatorPreGenesisConfig, }; use crate::config::global::GlobalConfig; -use crate::config::{ - self, Config, IntentGossiper, PeerAddress, TendermintMode, -}; +use crate::config::{self, Config, TendermintMode}; use crate::facade::tendermint::node::Id as TendermintNodeId; use crate::facade::tendermint_config::net::Address as TendermintAddress; -use crate::node::gossip; use crate::node::ledger::tendermint_node; use crate::wallet::{pre_genesis, Wallet}; use crate::wasm_loader; @@ -53,6 +50,7 @@ pub async fn join_network( chain_id, genesis_validator, pre_genesis_path, + dont_prefetch_wasm, }: args::JoinNetwork, ) { use tokio::fs; @@ -125,15 +123,6 @@ pub async fn join_network( None } }); - if let Some(wasm_dir) = wasm_dir.as_ref() { - if wasm_dir.is_absolute() { - eprintln!( - "The arg `--wasm-dir` cannot be an absolute path. It is \ - nested inside the chain directory." 
- ); - cli::safe_exit(1); - } - } let release_filename = format!("{}.tar.gz", chain_id); let release_url = format!( @@ -269,10 +258,10 @@ pub async fn join_network( let genesis_file_path = base_dir.join(format!("{}.toml", chain_id.as_str())); - let mut wallet = - Wallet::load_or_new_from_genesis(&chain_dir, move || { - genesis_config::open_genesis_config(genesis_file_path) - }); + let mut wallet = Wallet::load_or_new_from_genesis( + &chain_dir, + genesis_config::open_genesis_config(genesis_file_path).unwrap(), + ); let address = wallet .find_address(&validator_alias) @@ -290,7 +279,6 @@ pub async fn join_network( // Write consensus key to tendermint home tendermint_node::write_validator_key( &tm_home_dir, - &address, &*pre_genesis_wallet.consensus_key, ); @@ -345,10 +333,31 @@ pub async fn join_network( .await .unwrap(); } + if !dont_prefetch_wasm { + fetch_wasms_aux(&base_dir, &chain_id).await; + } println!("Successfully configured for chain ID {}", chain_id); } +pub async fn fetch_wasms( + global_args: args::Global, + args::FetchWasms { chain_id }: args::FetchWasms, +) { + fetch_wasms_aux(&global_args.base_dir, &chain_id).await; +} + +pub async fn fetch_wasms_aux(base_dir: &Path, chain_id: &ChainId) { + println!("Fetching wasms for chain ID {}...", chain_id); + let wasm_dir = { + let mut path = base_dir.to_owned(); + path.push(chain_id.as_str()); + path.push("wasm"); + path + }; + wasm_loader::pre_fetch_wasm(&wasm_dir).await; +} + /// Length of a Tendermint Node ID in bytes const TENDERMINT_NODE_ID_LENGTH: usize = 20; @@ -391,7 +400,8 @@ pub fn init_network( archive_dir, }: args::InitNetwork, ) { - let mut config = genesis_config::open_genesis_config(&genesis_path); + let mut config = + genesis_config::open_genesis_config(&genesis_path).unwrap(); // Update the WASM checksums let checksums = @@ -422,25 +432,10 @@ pub fn init_network( let mut rng: ThreadRng = thread_rng(); + // Accumulator of validators' Tendermint P2P addresses let mut persistent_peers: Vec = Vec::with_capacity(config.validator.len()); - // Intent gossiper config bootstrap peers where we'll add the address for - // each validator's node - let mut seed_peers: HashSet = - HashSet::with_capacity(config.validator.len()); - let mut gossiper_configs: HashMap = - HashMap::with_capacity(config.validator.len()); - let mut matchmaker_configs: HashMap = - HashMap::with_capacity(config.validator.len()); - // Other accounts owned by one of the validators - let mut validator_owned_accounts: HashMap< - String, - genesis_config::EstablishedAccountConfig, - > = HashMap::default(); - - // We need a temporary copy to be able to use this inside the validator - // loop, which has mutable borrow on the config. 
- let established_accounts = config.established.clone(); + // Iterate over each validator, generating keys and addresses config.validator.iter_mut().for_each(|(name, config)| { let validator_dir = accounts_dir.join(name); @@ -477,36 +472,6 @@ pub fn init_network( )) .expect("Validator address must be valid"); persistent_peers.push(peer); - // Add a Intent gossiper bootstrap peer from the validator's IP - let mut gossiper_config = IntentGossiper::default(); - // Generate P2P identity - let p2p_identity = gossip::p2p::Identity::gen(&chain_dir); - let peer_id = p2p_identity.peer_id(); - let ledger_addr = - SocketAddr::from_str(config.net_address.as_ref().unwrap()).unwrap(); - let ip = ledger_addr.ip().to_string(); - let first_port = ledger_addr.port(); - let intent_peer_address = libp2p::Multiaddr::from_str( - format!("/ip4/{}/tcp/{}", ip, first_port + 3).as_str(), - ) - .unwrap(); - - gossiper_config.address = if localhost { - intent_peer_address.clone() - } else { - libp2p::Multiaddr::from_str( - format!("/ip4/0.0.0.0/tcp/{}", first_port + 3).as_str(), - ) - .unwrap() - }; - if let Some(discover) = gossiper_config.discover_peer.as_mut() { - // Disable mDNS local network peer discovery on the validator nodes - discover.mdns = false; - } - let intent_peer = PeerAddress { - address: intent_peer_address, - peer_id, - }; // Generate account and reward addresses let address = address::gen_established_address("validator account"); @@ -533,11 +498,7 @@ pub fn init_network( ); // Write consensus key for Tendermint - tendermint_node::write_validator_key( - &tm_home_dir, - &address, - &keypair, - ); + tendermint_node::write_validator_key(&tm_home_dir, &keypair); keypair.ref_to() }); @@ -669,93 +630,20 @@ pub fn init_network( wallet.add_address(name.clone(), address); wallet.add_address(format!("{}-reward", &name), reward_address); - // Check if there's a matchmaker configured for this validator node - match ( - &config.matchmaker_account, - &config.matchmaker_code, - &config.matchmaker_tx, - ) { - (Some(account), Some(mm_code), Some(tx_code)) => { - if config.intent_gossip_seed.unwrap_or_default() { - eprintln!("A bootstrap node cannot run matchmakers"); - cli::safe_exit(1) - } - match established_accounts.as_ref().and_then(|e| e.get(account)) - { - Some(matchmaker) => { - let mut matchmaker = matchmaker.clone(); - - init_established_account( - account, - &mut wallet, - &mut matchmaker, - unsafe_dont_encrypt, - ); - validator_owned_accounts - .insert(account.clone(), matchmaker); - - let matchmaker_config = config::Matchmaker { - matchmaker_path: Some(mm_code.clone().into()), - tx_code_path: Some(tx_code.clone().into()), - }; - matchmaker_configs - .insert(name.clone(), matchmaker_config); - } - None => { - eprintln!( - "Misconfigured validator's matchmaker. No \ - established account with alias {} found", - account - ); - cli::safe_exit(1) - } - } - } - (None, None, None) => {} - _ => { - eprintln!( - "Misconfigured validator's matchmaker. \ - `matchmaker_account`, `matchmaker_code` and \ - `matchmaker_tx` must be all or none present." 
- ); - cli::safe_exit(1) - } - } - - // Store the gossip config - gossiper_configs.insert(name.clone(), gossiper_config); - if config.intent_gossip_seed.unwrap_or_default() { - seed_peers.insert(intent_peer); - } - wallet.save().unwrap(); }); - if seed_peers.is_empty() && config.validator.len() > 1 { - tracing::warn!( - "At least 1 validator with `intent_gossip_seed = true` is needed \ - to established connection between the intent gossiper nodes" - ); - } - // Create a wallet for all accounts other than validators let mut wallet = Wallet::load_or_new(&accounts_dir.join(NET_OTHER_ACCOUNTS_DIR)); if let Some(established) = &mut config.established { established.iter_mut().for_each(|(name, config)| { - match validator_owned_accounts.get(name) { - Some(validator_owned) => { - *config = validator_owned.clone(); - } - None => { - init_established_account( - name, - &mut wallet, - config, - unsafe_dont_encrypt, - ); - } - } + init_established_account( + name, + &mut wallet, + config, + unsafe_dont_encrypt, + ); }) } @@ -810,26 +698,6 @@ pub fn init_network( let genesis_path = global_args .base_dir .join(format!("{}.toml", chain_id.as_str())); - let wasm_dir = global_args - .wasm_dir - .as_ref() - .cloned() - .or_else(|| { - if let Ok(wasm_dir) = env::var(ENV_VAR_WASM_DIR) { - let wasm_dir: PathBuf = wasm_dir.into(); - Some(wasm_dir) - } else { - None - } - }) - .unwrap_or_else(|| config::DEFAULT_WASM_DIR.into()); - if wasm_dir.is_absolute() { - eprintln!( - "The arg `--wasm-dir` cannot be an absolute path. It is nested \ - inside the chain directory." - ); - cli::safe_exit(1); - } // Write the genesis file genesis_config::write_genesis_config(&config_clean, &genesis_path); @@ -846,7 +714,7 @@ pub fn init_network( fs::rename(&temp_dir, &chain_dir).unwrap(); // Copy the WASM checksums - let wasm_dir_full = chain_dir.join(&wasm_dir); + let wasm_dir_full = chain_dir.join(&config::DEFAULT_WASM_DIR); fs::create_dir_all(&wasm_dir_full).unwrap(); fs::copy( &wasm_checksums_path, @@ -872,7 +740,7 @@ pub fn init_network( .unwrap(); // Copy the WASM checksums - let wasm_dir_full = validator_chain_dir.join(&wasm_dir); + let wasm_dir_full = validator_chain_dir.join(&config::DEFAULT_WASM_DIR); fs::create_dir_all(&wasm_dir_full).unwrap(); fs::copy( &wasm_checksums_path, @@ -892,7 +760,7 @@ pub fn init_network( wallet.save().unwrap(); }); - // Generate the validators' ledger and intent gossip config + // Generate the validators' ledger config config.validator.iter_mut().enumerate().for_each( |(ix, (name, validator_config))| { let accounts_dir = chain_dir.join(NET_ACCOUNTS_DIR); @@ -928,6 +796,7 @@ pub fn init_network( consensus_timeout_commit; config.ledger.tendermint.p2p_allow_duplicate_ip = allow_duplicate_ip; + config.ledger.tendermint.p2p_addr_book_strict = !localhost; // Clear the net address from the config and use it to set ports let net_address = validator_config.net_address.take().unwrap(); let first_port = SocketAddr::from_str(&net_address).unwrap().port(); @@ -955,26 +824,6 @@ pub fn init_network( // Validator node should turned off peer exchange reactor config.ledger.tendermint.p2p_pex = false; - // Configure the intent gossiper, matchmaker (if any) and RPC - config.intent_gossiper = gossiper_configs.remove(name).unwrap(); - config.intent_gossiper.seed_peers = seed_peers.clone(); - config.matchmaker = - matchmaker_configs.remove(name).unwrap_or_default(); - config.intent_gossiper.rpc = Some(config::RpcServer { - address: SocketAddr::new( - IpAddr::V4(if localhost { - Ipv4Addr::new(127, 0, 0, 1) - } 
else { - Ipv4Addr::new(0, 0, 0, 0) - }), - first_port + 4, - ), - }); - config - .intent_gossiper - .matchmakers_server_addr - .set_port(first_port + 5); - config.write(&validator_dir, &chain_id, true).unwrap(); }, ); @@ -995,7 +844,6 @@ pub fn init_network( } config.ledger.tendermint.p2p_addr_book_strict = !localhost; config.ledger.genesis_time = genesis.genesis_time.into(); - config.intent_gossiper.seed_peers = seed_peers; config .write(&global_args.base_dir, &chain_id, true) .unwrap(); @@ -1210,7 +1058,8 @@ fn network_configs_url_prefix(chain_id: &ChainId) -> String { }) } -fn write_tendermint_node_key( +/// Write the node key into tendermint config dir. +pub fn write_tendermint_node_key( tm_home_dir: &Path, node_sk: common::SecretKey, ) -> common::PublicKey { diff --git a/apps/src/lib/config/genesis.rs b/apps/src/lib/config/genesis.rs index 288f0efbd4..6559804516 100644 --- a/apps/src/lib/config/genesis.rs +++ b/apps/src/lib/config/genesis.rs @@ -9,7 +9,6 @@ use derivative::Derivative; use namada::ledger::governance::parameters::GovParams; use namada::ledger::parameters::Parameters; use namada::ledger::pos::{GenesisValidator, PosParams}; -use namada::ledger::treasury::parameters::TreasuryParams; use namada::types::address::Address; #[cfg(not(feature = "dev"))] use namada::types::chain::ChainId; @@ -26,12 +25,12 @@ pub mod genesis_config { use std::path::Path; use std::str::FromStr; - use hex; + use data_encoding::HEXLOWER; + use eyre::Context; use namada::ledger::governance::parameters::GovParams; use namada::ledger::parameters::{EpochDuration, Parameters}; use namada::ledger::pos::types::BasisPoints; use namada::ledger::pos::{GenesisValidator, PosParams}; - use namada::ledger::treasury::parameters::TreasuryParams; use namada::types::address::Address; use namada::types::key::dkg_session_keys::DkgPublicKey; use namada::types::key::*; @@ -51,12 +50,12 @@ pub mod genesis_config { impl HexString { pub fn to_bytes(&self) -> Result, HexKeyError> { - let bytes = hex::decode(&self.0)?; + let bytes = HEXLOWER.decode(self.0.as_ref())?; Ok(bytes) } pub fn to_sha256_bytes(&self) -> Result<[u8; 32], HexKeyError> { - let bytes = hex::decode(&self.0)?; + let bytes = HEXLOWER.decode(self.0.as_ref())?; let slice = bytes.as_slice(); let array: [u8; 32] = slice.try_into()?; Ok(array) @@ -77,15 +76,15 @@ pub mod genesis_config { #[derive(Error, Debug)] pub enum HexKeyError { #[error("Invalid hex string: {0:?}")] - InvalidHexString(hex::FromHexError), + InvalidHexString(data_encoding::DecodeError), #[error("Invalid sha256 checksum: {0}")] InvalidSha256(TryFromSliceError), #[error("Invalid public key: {0}")] InvalidPublicKey(ParsePublicKeyError), } - impl From for HexKeyError { - fn from(err: hex::FromHexError) -> Self { + impl From for HexKeyError { + fn from(err: data_encoding::DecodeError) -> Self { Self::InvalidHexString(err) } } @@ -120,8 +119,6 @@ pub mod genesis_config { pub pos_params: PosParamsConfig, // Governance parameters pub gov_params: GovernanceParamsConfig, - // Treasury parameters - pub treasury_params: TreasuryParamasConfig, // Ethereum bridge config pub ethereum_bridge_params: Option, @@ -137,9 +134,12 @@ pub mod genesis_config { // Maximum size of proposal in kibibytes (KiB) // XXX: u64 doesn't work with toml-rs! pub max_proposal_code_size: u64, - // Proposal period length in epoch + // Minimum proposal period length in epochs // XXX: u64 doesn't work with toml-rs! pub min_proposal_period: u64, + // Maximum proposal period length in epochs + // XXX: u64 doesn't work with toml-rs! 
+ pub max_proposal_period: u64, // Maximum number of characters in the proposal content // XXX: u64 doesn't work with toml-rs! pub max_proposal_content_size: u64, @@ -148,13 +148,6 @@ pub mod genesis_config { pub min_proposal_grace_epochs: u64, } - #[derive(Clone, Debug, Deserialize, Serialize)] - pub struct TreasuryParamasConfig { - // Maximum funds that can be moved from treasury in a single transfer - // XXX: u64 doesn't work with toml-rs! - pub max_proposal_fund_transfer: u64, - } - /// Validator pre-genesis configuration can be created with client utils /// `init-genesis-validator` command and added to a genesis for /// `init-network` cmd and that can be subsequently read by `join-network` @@ -197,16 +190,6 @@ pub mod genesis_config { pub staking_reward_vp: Option, // IP:port of the validator. (used in generation only) pub net_address: Option, - /// Matchmaker account's alias, if any - pub matchmaker_account: Option, - /// Path to a matchmaker WASM program, if any - pub matchmaker_code: Option, - /// Path to a transaction WASM code used by the matchmaker, if any - pub matchmaker_tx: Option, - /// Is this validator running a seed intent gossip node? A seed node is - /// not part of the gossipsub where intents are being propagated and - /// hence cannot run matchmakers - pub intent_gossip_seed: Option, /// Tendermint node key is used to derive Tendermint node ID for node /// authentication pub tendermint_node_key: Option, @@ -570,6 +553,7 @@ pub mod genesis_config { min_proposal_fund: config.gov_params.min_proposal_fund, max_proposal_code_size: config.gov_params.max_proposal_code_size, min_proposal_period: config.gov_params.min_proposal_period, + max_proposal_period: config.gov_params.max_proposal_period, max_proposal_content_size: config .gov_params .max_proposal_content_size, @@ -578,10 +562,6 @@ pub mod genesis_config { .min_proposal_grace_epochs, }; - let treasury_params = TreasuryParams { - max_proposal_fund_transfer: 10_000, - }; - let pos_params = PosParams { max_validator_slots: config.pos_params.max_validator_slots, pipeline_len: config.pos_params.pipeline_len, @@ -608,15 +588,27 @@ pub mod genesis_config { parameters, pos_params, gov_params, - treasury_params, }; genesis.init(); genesis } - pub fn open_genesis_config(path: impl AsRef) -> GenesisConfig { - let config_file = std::fs::read_to_string(path).unwrap(); - toml::from_str(&config_file).unwrap() + pub fn open_genesis_config( + path: impl AsRef, + ) -> color_eyre::eyre::Result { + let config_file = + std::fs::read_to_string(&path).wrap_err_with(|| { + format!( + "couldn't read genesis config file from {}", + path.as_ref().to_string_lossy() + ) + })?; + toml::from_str(&config_file).wrap_err_with(|| { + format!( + "couldn't parse TOML from {}", + path.as_ref().to_string_lossy() + ) + }) } pub fn write_genesis_config( @@ -628,7 +620,7 @@ pub mod genesis_config { } pub fn read_genesis_config(path: impl AsRef) -> Genesis { - load_genesis_config(open_genesis_config(path)) + load_genesis_config(open_genesis_config(path).unwrap()) } } @@ -643,7 +635,6 @@ pub struct Genesis { pub parameters: Parameters, pub pos_params: PosParams, pub gov_params: GovParams, - pub treasury_params: TreasuryParams, } impl Genesis { @@ -835,13 +826,6 @@ pub fn genesis() -> Genesis { public_key: Some(wallet::defaults::christel_keypair().ref_to()), storage: HashMap::default(), }; - let matchmaker = EstablishedAccount { - address: wallet::defaults::matchmaker_address(), - vp_code_path: vp_user_path.into(), - vp_sha256: Default::default(), - public_key: 
Some(wallet::defaults::matchmaker_keypair().ref_to()), - storage: HashMap::default(), - }; let implicit_accounts = vec![ImplicitAccount { public_key: wallet::defaults::daewon_keypair().ref_to(), }]; @@ -868,10 +852,6 @@ pub fn genesis() -> Genesis { default_key_tokens, ), ((&validator.account_key).into(), default_key_tokens), - ( - matchmaker.public_key.as_ref().unwrap().into(), - default_key_tokens, - ), ]); let token_accounts = address::tokens() .into_iter() @@ -885,13 +865,12 @@ pub fn genesis() -> Genesis { Genesis { genesis_time: DateTimeUtc::now(), validators: vec![validator], - established_accounts: vec![albert, bertha, christel, matchmaker], + established_accounts: vec![albert, bertha, christel], implicit_accounts, token_accounts, parameters, pos_params: PosParams::default(), gov_params: GovParams::default(), - treasury_params: TreasuryParams::default(), } } diff --git a/apps/src/lib/config/mod.rs b/apps/src/lib/config/mod.rs index bd9484e141..8e56efa0b8 100644 --- a/apps/src/lib/config/mod.rs +++ b/apps/src/lib/config/mod.rs @@ -5,21 +5,15 @@ pub mod genesis; pub mod global; pub mod utils; -use std::collections::HashSet; -use std::fmt::Display; use std::fs::{create_dir_all, File}; use std::io::Write; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::path::{Path, PathBuf}; use std::str::FromStr; -use libp2p::multiaddr::{Multiaddr, Protocol}; -use libp2p::multihash::Multihash; -use libp2p::PeerId; use namada::types::chain::ChainId; use namada::types::time::Rfc3339String; -use regex::Regex; -use serde::{de, Deserialize, Serialize}; +use serde::{Deserialize, Serialize}; use thiserror::Error; use crate::cli; @@ -28,7 +22,7 @@ use crate::facade::tendermint_config::net::Address as TendermintAddress; /// Base directory contains global config and chain directories. pub const DEFAULT_BASE_DIR: &str = ".anoma"; -/// Default WASM dir. Note that WASM dirs are nested in chain dirs. +/// Default WASM dir. pub const DEFAULT_WASM_DIR: &str = "wasm"; /// The WASM checksums file contains the hashes of built WASMs. It is inside the /// WASM dir. @@ -44,9 +38,6 @@ pub const DB_DIR: &str = "db"; pub struct Config { pub wasm_dir: PathBuf, pub ledger: Ledger, - pub intent_gossiper: IntentGossiper, - // TODO allow to configure multiple matchmakers - pub matchmaker: Matchmaker, } #[derive(Clone, Debug, Serialize, Deserialize)] @@ -128,32 +119,6 @@ pub struct Tendermint { pub instrumentation_namespace: String, } -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct IntentGossiper { - // Simple values - pub address: Multiaddr, - pub topics: HashSet, - /// The server address to which matchmakers can connect to receive intents - pub matchmakers_server_addr: SocketAddr, - - // Nested structures ⚠️ no simple values below any of these ⚠️ - pub subscription_filter: SubscriptionFilter, - pub seed_peers: HashSet, - pub rpc: Option, - pub discover_peer: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct RpcServer { - pub address: SocketAddr, -} - -#[derive(Default, Debug, Serialize, Deserialize, Clone)] -pub struct Matchmaker { - pub matchmaker_path: Option, - pub tx_code_path: Option, -} - impl Ledger { pub fn new( base_dir: impl AsRef, @@ -231,38 +196,6 @@ impl Shell { } } -// TODO maybe add also maxCount for a maximum number of subscription for a -// filter. - -// TODO toml failed to serialize without "untagged" because does not support -// enum with nested data, unless with the untagged flag. This might be a source -// of confusion in the future... 
Another approach would be to have multiple -// field for each filter possibility but it's less nice. -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum SubscriptionFilter { - RegexFilter(#[serde(with = "serde_regex")] Regex), - WhitelistFilter(Vec), -} - -// TODO peer_id can be part of Multiaddr, mayby this splitting is not useful ? -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)] -pub struct PeerAddress { - pub address: Multiaddr, - pub peer_id: PeerId, -} - -// TODO add reserved_peers: explicit peers for gossipsub network, to not be -// added to kademlia -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct DiscoverPeer { - pub max_discovery_peers: u64, - /// Toggle Kademlia remote peer discovery, on by default - pub kademlia: bool, - /// Toggle local network mDNS peer discovery, off by default - pub mdns: bool, -} - #[derive(Error, Debug)] pub enum Error { #[error("Error while reading config: {0}")] @@ -305,8 +238,6 @@ impl Config { Self { wasm_dir: DEFAULT_WASM_DIR.into(), ledger: Ledger::new(base_dir, chain_id, mode), - intent_gossiper: IntentGossiper::default(), - matchmaker: Matchmaker::default(), } } @@ -415,97 +346,6 @@ impl Config { } } -impl Default for IntentGossiper { - fn default() -> Self { - Self { - address: Multiaddr::from_str("/ip4/0.0.0.0/tcp/26659").unwrap(), - topics: vec!["asset_v0"].into_iter().map(String::from).collect(), - matchmakers_server_addr: SocketAddr::new( - IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), - 26661, - ), - subscription_filter: SubscriptionFilter::RegexFilter( - Regex::new("asset_v\\d{1,2}").unwrap(), - ), - seed_peers: HashSet::default(), - rpc: None, - discover_peer: Some(DiscoverPeer::default()), - } - } -} - -impl IntentGossiper { - pub fn update(&mut self, addr: Option, rpc: Option) { - if let Some(addr) = addr { - self.address = addr; - } - if let Some(address) = rpc { - self.rpc = Some(RpcServer { address }); - } - } -} - -impl Default for RpcServer { - fn default() -> Self { - Self { - address: SocketAddr::new( - IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), - 26660, - ), - } - } -} - -impl Serialize for PeerAddress { - fn serialize( - &self, - serializer: S, - ) -> std::result::Result - where - S: serde::Serializer, - { - let mut address = self.address.clone(); - address.push(Protocol::P2p(Multihash::from(self.peer_id))); - address.serialize(serializer) - } -} - -impl de::Error for SerdeError { - fn custom(msg: T) -> Self { - SerdeError::Message(msg.to_string()) - } -} - -impl<'de> Deserialize<'de> for PeerAddress { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - use serde::de::Error; - - let mut address = Multiaddr::deserialize(deserializer) - .map_err(|err| SerdeError::BadBootstrapPeerFormat(err.to_string())) - .map_err(D::Error::custom)?; - if let Some(Protocol::P2p(mh)) = address.pop() { - let peer_id = PeerId::from_multihash(mh).unwrap(); - Ok(Self { address, peer_id }) - } else { - Err(SerdeError::BadBootstrapPeerFormat(address.to_string())) - .map_err(D::Error::custom) - } - } -} - -impl Default for DiscoverPeer { - fn default() -> Self { - Self { - max_discovery_peers: 16, - kademlia: true, - mdns: false, - } - } -} - pub const VALUE_AFTER_TABLE_ERROR_MSG: &str = r#" Error while serializing to toml. It means that some nested structure is followed by simple fields. 
diff --git a/apps/src/lib/mod.rs b/apps/src/lib/mod.rs index 8ba2e59855..d8ab71236c 100644 --- a/apps/src/lib/mod.rs +++ b/apps/src/lib/mod.rs @@ -10,7 +10,6 @@ pub mod client; pub mod config; pub mod logging; pub mod node; -pub mod proto; pub mod wallet; pub mod wasm_loader; diff --git a/apps/src/lib/node/gossip/intent_gossiper.rs b/apps/src/lib/node/gossip/intent_gossiper.rs deleted file mode 100644 index 2c7816a5bf..0000000000 --- a/apps/src/lib/node/gossip/intent_gossiper.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::net::ToSocketAddrs; -use std::sync::{Arc, RwLock}; - -use namada::proto::{Intent, IntentId}; - -use super::mempool::IntentMempool; -use super::rpc::matchmakers::{ - MsgFromClient, MsgFromServer, ServerDialer, ServerListener, -}; - -/// A server for connected matchmakers that can receive intents from the intent -/// gossiper node and send back the results from their filter, if any, or from -/// trying to match them. -#[derive(Debug, Default)] -pub struct MatchmakersServer { - /// A node listener and its abort receiver. These are consumed once the - /// listener is started with [`MatchmakersServer::listen`]. - listener: Option, - /// Known intents mempool, shared with [`IntentGossiper`]. - mempool: Arc>, -} - -/// Intent gossiper handle can be cloned and is thread safe. -#[derive(Clone, Debug)] -pub struct IntentGossiper { - /// Known intents mempool, shared with [`MatchmakersServer`]. - mempool: Arc>, - /// A dialer can send messages to the connected matchmaker - dialer: ServerDialer, -} - -impl MatchmakersServer { - /// Create a new gossip intent app with a matchmaker, if enabled. - pub fn new_pair( - matchmakers_server_addr: impl ToSocketAddrs, - ) -> (Self, IntentGossiper) { - // Prepare a server for matchmakers connections - let (listener, dialer) = - ServerListener::new_pair(matchmakers_server_addr); - - let mempool = Arc::new(RwLock::new(IntentMempool::default())); - let intent_gossiper = IntentGossiper { - mempool: mempool.clone(), - dialer, - }; - ( - Self { - listener: Some(listener), - mempool, - }, - intent_gossiper, - ) - } - - pub async fn listen(mut self) { - self.listener - .take() - .unwrap() - .listen(|msg| match msg { - MsgFromClient::InvalidIntent { id } => { - let id = IntentId(id); - // Remove matched intents from mempool - tracing::info!("Removing matched intent ID {}", id); - let mut w_mempool = self.mempool.write().unwrap(); - w_mempool.remove(&id); - } - MsgFromClient::IntentConstraintsTooComplex { id } => { - let id = IntentId(id); - tracing::info!( - "Intent ID {} has constraints that are too complex \ - for a connected matchmaker", - id - ); - } - MsgFromClient::IgnoredIntent { id } => { - let id = IntentId(id); - tracing::info!( - "Intent ID {} ignored by a connected matchmaker", - id - ); - } - MsgFromClient::Matched { intent_ids } => { - // Remove matched intents from mempool - let mut w_mempool = self.mempool.write().unwrap(); - for id in intent_ids { - let id = IntentId(id); - tracing::info!("Removing matched intent ID {}", id); - w_mempool.remove(&id); - } - } - MsgFromClient::Unmatched { id } => { - let id = IntentId(id); - tracing::info!("No match found for intent ID {}", id); - } - }) - .await - } -} - -impl IntentGossiper { - // Apply the logic to a new intent. It only tries to apply the matchmaker if - // this one exists. If no matchmaker then returns true. 
- pub async fn add_intent(&mut self, intent: Intent) { - let id = intent.id(); - - let r_mempool = self.mempool.read().unwrap(); - let is_known = r_mempool.contains(&id); - drop(r_mempool); - if !is_known { - let mut w_mempool = self.mempool.write().unwrap(); - w_mempool.insert(intent.clone()); - } - - tracing::info!( - "Sending intent ID {} to connected matchmakers, if any", - id - ); - self.dialer.send(MsgFromServer::AddIntent { - id: id.0, - data: intent.data, - }) - } -} diff --git a/apps/src/lib/node/gossip/mempool.rs b/apps/src/lib/node/gossip/mempool.rs deleted file mode 100644 index fce66447c4..0000000000 --- a/apps/src/lib/node/gossip/mempool.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::collections::HashMap; - -use namada::proto::{Intent, IntentId}; - -/// In-memory intent mempool -#[derive(Clone, Debug, Default)] -pub struct IntentMempool(HashMap); - -impl IntentMempool { - /// Insert a new intent. If the mempool didn't have this intent present, - /// returns `true`. - pub fn insert(&mut self, intent: Intent) -> bool { - self.0.insert(intent.id(), intent).is_none() - } - - /// Remove an intent from mempool. If the mempool didn't have this intent - /// present, returns `true`. in the mempool. - pub fn remove(&mut self, intent_id: &IntentId) -> bool { - self.0.remove(intent_id).is_some() - } - - /// Returns `true` if the map contains intent with specified ID. - pub fn contains(&self, intent_id: &IntentId) -> bool { - self.0.contains_key(intent_id) - } -} diff --git a/apps/src/lib/node/gossip/mod.rs b/apps/src/lib/node/gossip/mod.rs deleted file mode 100644 index 03b14f14ef..0000000000 --- a/apps/src/lib/node/gossip/mod.rs +++ /dev/null @@ -1,116 +0,0 @@ -pub mod intent_gossiper; -mod mempool; -pub mod p2p; -pub mod rpc; - -use std::path::Path; - -use namada::proto::Intent; -use thiserror::Error; -use tokio::sync::mpsc; - -use self::intent_gossiper::IntentGossiper; -use self::p2p::P2P; -use crate::config; -use crate::proto::services::{rpc_message, RpcResponse}; - -#[derive(Error, Debug)] -pub enum Error { - #[error("Error initializing p2p: {0}")] - P2pInit(p2p::Error), -} - -type Result = std::result::Result; - -/// RPC async receiver end of the channel -pub type RpcReceiver = tokio::sync::mpsc::Receiver<( - rpc_message::Message, - tokio::sync::oneshot::Sender, -)>; - -#[tokio::main] -pub async fn run( - config: config::IntentGossiper, - base_dir: impl AsRef, -) -> Result<()> { - // Prepare matchmakers server and dialer - let (matchmakers_server, intent_gossiper) = - intent_gossiper::MatchmakersServer::new_pair( - &config.matchmakers_server_addr, - ); - - // Async channel for intents received from peer - let (peer_intent_send, peer_intent_recv) = tokio::sync::mpsc::channel(100); - - // Create the P2P gossip network, which can send messages directly to the - // matchmaker, if any - let p2p = p2p::P2P::new(&config, base_dir, peer_intent_send) - .await - .map_err(Error::P2pInit)?; - - // Run the matchmakers server - let mms_join_handle = tokio::task::spawn(async move { - matchmakers_server.listen().await; - }); - - // Start the RPC server, if enabled in the config - let rpc_receiver = config.rpc.map(|rpc_config| { - let (rpc_sender, rpc_receiver) = mpsc::channel(100); - tokio::spawn(async move { - rpc::client::start_rpc_server(&rpc_config, rpc_sender).await - }); - rpc_receiver - }); - - dispatcher( - p2p, - rpc_receiver, - peer_intent_recv, - intent_gossiper, - mms_join_handle, - ) - .await -} - -// loop over all possible event. 
The event can be from the rpc, a matchmaker -// program or the gossip network. The gossip network event are a special case -// that does not need to be handle as it's taking care of by the libp2p internal -// logic. -pub async fn dispatcher( - mut p2p: P2P, - mut rpc_receiver: Option, - mut peer_intent_recv: tokio::sync::mpsc::Receiver, - mut intent_gossiper: IntentGossiper, - _mms_join_handle: tokio::task::JoinHandle<()>, -) -> Result<()> { - loop { - tokio::select! { - Some((event, inject_response)) = recv_rpc_option(rpc_receiver.as_mut()), if rpc_receiver.is_some() => - { - let gossip_sub = &mut p2p.0.behaviour_mut().intent_gossip_behaviour; - let (response, maybe_intent) = rpc::client::handle_rpc_event(event, gossip_sub).await; - inject_response.send(response).expect("failed to send response to rpc server"); - - if let Some(intent) = maybe_intent { - intent_gossiper.add_intent(intent).await; - } - }, - Some(intent) = peer_intent_recv.recv() => { - intent_gossiper.add_intent(intent).await; - } - swarm_event = p2p.0.next() => { - // Never occurs, but call for the event must exists. - tracing::info!("event, {:?}", swarm_event); - }, - }; - } -} - -async fn recv_rpc_option( - x: Option<&mut RpcReceiver>, -) -> Option<( - rpc_message::Message, - tokio::sync::oneshot::Sender, -)> { - x?.recv().await -} diff --git a/apps/src/lib/node/gossip/p2p/behaviour/discovery.rs b/apps/src/lib/node/gossip/p2p/behaviour/discovery.rs deleted file mode 100644 index 57d99c7bc0..0000000000 --- a/apps/src/lib/node/gossip/p2p/behaviour/discovery.rs +++ /dev/null @@ -1,517 +0,0 @@ -// This file is almost identical to this -// https://github.com/webb-tools/anonima/blob/main/network/src/discovery.rs -// appropriate affiliation needs to be added here original header : -// -// Copyright 2020 ChainSafe Systems SPDX-License-Identifier: Apache-2.0, MIT - -use std::collections::{HashSet, VecDeque}; -use std::fmt::Display; -use std::task::{Context, Poll}; -use std::time::Duration; -use std::{cmp, io}; - -use async_std::stream::{self, Interval}; -use futures::StreamExt; -use libp2p::core::connection::{ConnectionId, ListenerId}; -use libp2p::core::ConnectedPoint; -use libp2p::kad::handler::KademliaHandlerProto; -use libp2p::kad::store::MemoryStore; -use libp2p::kad::{Kademlia, KademliaConfig, KademliaEvent, QueryId}; -use libp2p::mdns::{Mdns, MdnsConfig, MdnsEvent}; -use libp2p::swarm::toggle::{Toggle, ToggleIntoProtoHandler}; -use libp2p::swarm::{ - IntoProtocolsHandler, NetworkBehaviour, NetworkBehaviourAction, - PollParameters, ProtocolsHandler, -}; -use libp2p::{Multiaddr, PeerId}; -use thiserror::Error; - -use crate::config::PeerAddress; - -#[derive(Error, Debug)] -pub enum Error { - // TODO, it seems that NoKnownPeer is not exposed, could not find it - #[error("Failed to bootstrap kademlia {0}")] - FailedBootstrap(String), - #[error("Failed to initialize mdns {0}")] - FailedMdns(std::io::Error), -} - -pub type Result = std::result::Result; - -/// Event generated by the `DiscoveryBehaviour`. -#[derive(Debug)] -pub enum DiscoveryEvent { - /// Event that notifies that we connected to the node with the given peer - /// id. - Connected(PeerId), - - /// Event that notifies that we disconnected with the node with the given - /// peer id. - Disconnected(PeerId), - - /// This case is only use to clean the code in the poll fct - KademliaEvent(KademliaEvent), -} - -/// `DiscoveryBehaviour` configuration. 
-#[derive(Clone)] -pub struct DiscoveryConfig { - /// user defined peer that are given to kad in order to connect to the - /// network - user_defined: Vec, - /// maximum number of peer to connect to - discovery_max: u64, - /// enable kademlia to find new peer - enable_kademlia: bool, - /// look for new peer over local network. - // TODO: it seems that kademlia must activated where it should not be - // mandatory - enable_mdns: bool, - // TODO: should this be optional? if not explain why - /// use the option from kademlia. Prevent some type of attacks against - /// kademlia. - kademlia_disjoint_query_paths: bool, -} - -impl Default for DiscoveryConfig { - fn default() -> Self { - Self { - user_defined: Vec::new(), - discovery_max: u64::MAX, - enable_kademlia: true, - enable_mdns: true, - kademlia_disjoint_query_paths: true, - } - } -} - -#[derive(Default)] -pub struct DiscoveryConfigBuilder { - config: DiscoveryConfig, -} - -impl DiscoveryConfigBuilder { - /// Set the number of active connections at which we pause discovery. - pub fn discovery_limit(&mut self, limit: u64) -> &mut Self { - self.config.discovery_max = limit; - self - } - - /// Set custom nodes which never expire, e.g. bootstrap or reserved nodes. - pub fn with_user_defined(&mut self, user_defined: I) -> &mut Self - where - I: IntoIterator, - { - self.config.user_defined.extend(user_defined); - self - } - - /// Configures if disjoint query path is enabled - pub fn use_kademlia_disjoint_query_paths( - &mut self, - value: bool, - ) -> &mut Self { - self.config.kademlia_disjoint_query_paths = value; - self - } - - /// Configures if mdns is enabled. - pub fn with_mdns(&mut self, value: bool) -> &mut Self { - self.config.enable_mdns = value; - self - } - - /// Configures if Kademlia is enabled. - pub fn with_kademlia(&mut self, value: bool) -> &mut Self { - self.config.enable_kademlia = value; - self - } - - /// Build the discovery config - pub fn build(&self) -> Result { - Ok(self.config.clone()) - } -} - -/// Implementation of `NetworkBehaviour` that discovers the nodes on the -/// network. -pub struct DiscoveryBehaviour { - /// User-defined list of nodes and their addresses. Typically includes - /// bootstrap nodes and reserved nodes. - user_defined: Vec, - /// Kademlia discovery. - pub kademlia: Toggle>, - /// Discovers nodes on the local network. - mdns: Toggle, - /// Stream that fires when we need to perform the next random Kademlia - /// query. - next_kad_random_query: Option, - /// After `next_kad_random_query` triggers, the next one triggers after - /// this duration. - duration_to_next_kad: Duration, - /// Events to return in priority when polled. - pending_events: VecDeque, - /// Number of nodes we're currently connected to. - num_connections: u64, - /// Keeps hash set of peers connected. - peers: HashSet, - /// Number of active connections to pause discovery on. - discovery_max: u64, -} - -impl Display for DiscoveryBehaviour { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str( - format!( - "{{ - user_defined {:?}, - kademlia: {:?}, - mdns: {:?}, - next_kad_random_query: {:?}, - duration_to_next_kad {:?}, - num_connection: {:?}, - peers: {:?}, - discovery_max: {:?} -}}", - self.user_defined, - self.kademlia.is_enabled(), - self.mdns.is_enabled(), - self.next_kad_random_query, - self.duration_to_next_kad, - self.num_connections, - self.peers, - self.discovery_max - ) - .as_str(), - ) - } -} - -impl DiscoveryBehaviour { - /// Create a `DiscoveryBehaviour` from a config. 
- pub async fn new( - local_peer_id: PeerId, - config: DiscoveryConfig, - ) -> Result { - let DiscoveryConfig { - user_defined, - discovery_max, - enable_kademlia, - enable_mdns, - kademlia_disjoint_query_paths, - } = config; - - let mut peers = HashSet::with_capacity(user_defined.len()); - - // Kademlia config - let kademlia_opt = if enable_kademlia { - let store = MemoryStore::new(local_peer_id.to_owned()); - let mut kad_config = KademliaConfig::default(); - kad_config.disjoint_query_paths(kademlia_disjoint_query_paths); - // TODO: choose a better protocol name - kad_config.set_protocol_name( - "/anoma/kad/anoma/kad/1.0.0".as_bytes().to_vec(), - ); - - let mut kademlia = - Kademlia::with_config(local_peer_id, store, kad_config); - - user_defined - .iter() - .for_each(|PeerAddress { address, peer_id }| { - kademlia.add_address(peer_id, address.clone()); - peers.insert(*peer_id); - }); - - // TODO: For production should node fail when kad failed to - // bootstrap? - if let Err(err) = kademlia.bootstrap() { - tracing::error!("failed to bootstrap kad : {:?}", err); - }; - Some(kademlia) - } else { - None - }; - - let mdns_opt = if enable_mdns { - Some( - Mdns::new(MdnsConfig::default()) - .await - .map_err(Error::FailedMdns)?, - ) - } else { - None - }; - - Ok(DiscoveryBehaviour { - user_defined, - kademlia: kademlia_opt.into(), - mdns: mdns_opt.into(), - next_kad_random_query: None, - duration_to_next_kad: Duration::from_secs(1), - pending_events: VecDeque::new(), - num_connections: 0, - peers, - discovery_max, - }) - } -} - -// Most function here are a wrapper around kad behaviour, -impl NetworkBehaviour for DiscoveryBehaviour { - type OutEvent = DiscoveryEvent; - type ProtocolsHandler = - ToggleIntoProtoHandler>; - - fn new_handler(&mut self) -> Self::ProtocolsHandler { - self.kademlia.new_handler() - } - - /// Look for the address of a peer first in the user defined list then in - /// kademlia then lastly in the local network. Sum all possible address and - /// returns. 
- fn addresses_of_peer(&mut self, peer_id: &PeerId) -> Vec { - let mut list = self - .user_defined - .iter() - .filter_map(|peer_address| { - if &peer_address.peer_id == peer_id { - Some(peer_address.address.clone()) - } else { - None - } - }) - .collect::>(); - - list.extend(self.kademlia.addresses_of_peer(peer_id)); - - list.extend(self.mdns.addresses_of_peer(peer_id)); - - list - } - - fn inject_connected(&mut self, peer_id: &PeerId) { - tracing::debug!("Injecting connected peer {}", peer_id); - self.peers.insert(*peer_id); - self.pending_events - .push_back(DiscoveryEvent::Connected(*peer_id)); - - self.kademlia.inject_connected(peer_id) - } - - fn inject_disconnected(&mut self, peer_id: &PeerId) { - tracing::debug!("Injecting disconnected peer {}", peer_id); - self.peers.remove(peer_id); - self.pending_events - .push_back(DiscoveryEvent::Disconnected(*peer_id)); - - self.kademlia.inject_disconnected(peer_id) - } - - fn inject_connection_established( - &mut self, - peer_id: &PeerId, - conn: &ConnectionId, - endpoint: &ConnectedPoint, - ) { - tracing::debug!( - "Injecting connection established for peer ID {} with endpoint \ - {:#?}", - peer_id, - endpoint - ); - self.num_connections += 1; - - self.kademlia - .inject_connection_established(peer_id, conn, endpoint) - } - - fn inject_connection_closed( - &mut self, - peer_id: &PeerId, - conn: &ConnectionId, - endpoint: &ConnectedPoint, - ) { - tracing::debug!("Injecting connection closed for peer ID {}", peer_id); - self.num_connections -= 1; - - self.kademlia - .inject_connection_closed(peer_id, conn, endpoint) - } - - fn inject_address_change( - &mut self, - peer: &PeerId, - id: &ConnectionId, - old: &ConnectedPoint, - new: &ConnectedPoint, - ) { - self.kademlia.inject_address_change(peer, id, old, new) - } - - fn inject_event( - &mut self, - peer_id: PeerId, - connection: ConnectionId, - event: <::Handler as ProtocolsHandler>::OutEvent, - ) { - self.kademlia.inject_event(peer_id, connection, event) - } - - fn inject_addr_reach_failure( - &mut self, - peer_id: Option<&PeerId>, - addr: &Multiaddr, - error: &dyn std::error::Error, - ) { - self.kademlia - .inject_addr_reach_failure(peer_id, addr, error) - } - - fn inject_dial_failure(&mut self, peer_id: &PeerId) { - self.kademlia.inject_dial_failure(peer_id) - } - - fn inject_new_listen_addr(&mut self, id: ListenerId, addr: &Multiaddr) { - self.kademlia.inject_new_listen_addr(id, addr) - } - - fn inject_expired_listen_addr(&mut self, id: ListenerId, addr: &Multiaddr) { - self.kademlia.inject_expired_listen_addr(id, addr); - } - - fn inject_listener_error( - &mut self, - id: ListenerId, - err: &(dyn std::error::Error + 'static), - ) { - self.kademlia.inject_listener_error(id, err) - } - - fn inject_listener_closed( - &mut self, - id: ListenerId, - reason: std::result::Result<(), &io::Error>, - ) { - self.kademlia.inject_listener_closed(id, reason) - } - - fn inject_new_external_addr(&mut self, addr: &Multiaddr) { - self.kademlia.inject_new_external_addr(addr) - } - - // This poll function is called by libp2p to fetch/generate new event. First - // in the local queue then in kademlia and lastly in Mdns. - #[allow(clippy::type_complexity)] - fn poll( - &mut self, - cx: &mut Context, - params: &mut impl PollParameters, - ) -> Poll< - NetworkBehaviourAction< - <::Handler as ProtocolsHandler>::InEvent, - Self::OutEvent, - >, - >{ - // Immediately process the content of `discovered`. 
- if let Some(ev) = self.pending_events.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } - - // Poll Kademlia return every other event except kad event - while let Poll::Ready(ev) = self.kademlia.poll(cx, params) { - tracing::debug!("Kademlia event {:#?}", ev); - if let NetworkBehaviourAction::GenerateEvent(_kad_ev) = ev { - } else { - return Poll::Ready(ev.map_out(DiscoveryEvent::KademliaEvent)); - } - } - - // Poll the stream that fires when we need to start a random Kademlia - // query. When the stream provides a new value then it tries to look for - // a node and connect to it. - // TODO: explain a bit more the logic happening here - if let Some(next_kad_random_query) = self.next_kad_random_query.as_mut() - { - tracing::debug!( - "Kademlia random query {:#?}", - next_kad_random_query - ); - while next_kad_random_query.poll_next_unpin(cx).is_ready() { - if self.num_connections < self.discovery_max { - let random_peer_id = PeerId::random(); - tracing::debug!( - "Libp2p <= Starting random Kademlia request for {:?}", - random_peer_id - ); - if let Some(k) = self.kademlia.as_mut() { - k.get_closest_peers(random_peer_id); - } - } - - *next_kad_random_query = - stream::interval(self.duration_to_next_kad); - self.duration_to_next_kad = cmp::min( - self.duration_to_next_kad * 2, - Duration::from_secs(60), - ); - } - } - - // Poll mdns. If mdns generated new Discovered event then connect to it - // TODO: refactor this function, it can't be done as the kad done - while let Poll::Ready(ev) = self.mdns.poll(cx, params) { - match ev { - NetworkBehaviourAction::GenerateEvent(event) => match event { - MdnsEvent::Discovered(list) => { - if self.num_connections < self.discovery_max { - // Add any discovered peers to Kademlia - for (peer_id, multiaddr) in list { - if let Some(kad) = self.kademlia.as_mut() { - kad.add_address(&peer_id, multiaddr); - } - } - } else { - tracing::info!( - "max reached {:?}, {:?}", - self.num_connections, - self.discovery_max - ); - // Already over discovery max, don't add discovered - // peers. We could potentially buffer these - // addresses to be added later, but mdns is not an - // important use case and may be removed in future. - } - } - MdnsEvent::Expired(_) => {} - }, - NetworkBehaviourAction::DialAddress { address } => { - return Poll::Ready(NetworkBehaviourAction::DialAddress { - address, - }); - } - NetworkBehaviourAction::DialPeer { peer_id, condition } => { - return Poll::Ready(NetworkBehaviourAction::DialPeer { - peer_id, - condition, - }); - } - // Nothing to notify handler - NetworkBehaviourAction::NotifyHandler { .. 
} => {} - NetworkBehaviourAction::ReportObservedAddr { - address, - score, - } => { - return Poll::Ready( - NetworkBehaviourAction::ReportObservedAddr { - address, - score, - }, - ); - } - } - } - Poll::Pending - } -} diff --git a/apps/src/lib/node/gossip/p2p/behaviour/mod.rs b/apps/src/lib/node/gossip/p2p/behaviour/mod.rs deleted file mode 100644 index f70a300b9e..0000000000 --- a/apps/src/lib/node/gossip/p2p/behaviour/mod.rs +++ /dev/null @@ -1,412 +0,0 @@ -mod discovery; -use std::collections::hash_map::DefaultHasher; -use std::convert::TryFrom; -use std::hash::{Hash, Hasher}; -use std::time::Duration; - -use libp2p::gossipsub::subscription_filter::regex::RegexSubscriptionFilter; -use libp2p::gossipsub::subscription_filter::{ - TopicSubscriptionFilter, WhitelistSubscriptionFilter, -}; -use libp2p::gossipsub::{ - self, GossipsubEvent, GossipsubMessage, IdentTopic, IdentityTransform, - MessageAcceptance, MessageAuthenticity, MessageId, TopicHash, - ValidationMode, -}; -use libp2p::identify::{Identify, IdentifyConfig, IdentifyEvent}; -use libp2p::identity::Keypair; -use libp2p::ping::{Ping, PingEvent, PingFailure, PingSuccess}; -use libp2p::swarm::NetworkBehaviourEventProcess; -use libp2p::{NetworkBehaviour, PeerId}; -use namada::proto::{self, Intent, IntentGossipMessage}; -use thiserror::Error; -use tokio::sync::mpsc::Sender; - -use self::discovery::DiscoveryEvent; -use crate::config; -use crate::node::gossip::p2p::behaviour::discovery::{ - DiscoveryBehaviour, DiscoveryConfigBuilder, -}; - -/// Behaviour is composed of a `DiscoveryBehaviour` and an GossipsubBehaviour`. -/// It automatically connect to newly discovered peer, except specified -/// otherwise, and propagates intents to other peers. -#[derive(NetworkBehaviour)] -pub struct Behaviour { - pub intent_gossip_behaviour: Gossipsub, - pub discover_behaviour: DiscoveryBehaviour, - /// The identify protocol allows establishing P2P connections via Kademlia - identify: Identify, - /// Responds to inbound pings and periodically sends outbound pings on - /// every established connection - ping: Ping, - #[behaviour(ignore)] - pub peer_intent_send: Sender, -} - -#[derive(Error, Debug)] -pub enum Error { - #[error("Failed to subscribe")] - FailedSubscription(libp2p::gossipsub::error::SubscriptionError), - #[error("Failed initializing the topic filter: {0}")] - Filter(String), - #[error("Failed initializing the gossip behaviour: {0}")] - GossipConfig(String), - #[error("Failed on the the discovery behaviour config: {0}")] - DiscoveryConfig(String), - #[error("Failed initializing the discovery behaviour: {0}")] - Discovery(discovery::Error), - #[error("Failed initializing mdns: {0}")] - Mdns(std::io::Error), -} - -pub type Gossipsub = libp2p::gossipsub::Gossipsub< - IdentityTransform, - IntentGossipSubscriptionFilter, ->; - -// TODO merge type of config and this one ? Maybe not a good idea -// TODO extends with MaxSubscribionFilter -/// IntentGossipSubscriptionfilter is a wrapper of TopicSubscriptionFilter to -/// allows combination of any sort of filter. -pub enum IntentGossipSubscriptionFilter { - RegexFilter(RegexSubscriptionFilter), - WhitelistFilter(WhitelistSubscriptionFilter), -} - -/// IntentGossipEvent describe events received/sent in the gossipsub network. -/// All information are extracted from the GossipsubEvent type. This type is -/// used as a wrapper of GossipsubEvent in order to have only information of -/// interest and possibly enforce some invariant. 
-#[derive(Debug)] -pub struct IntentGossipEvent { - /// The PeerId that initially created this message - pub propagation_source: PeerId, - /// The MessageId of this message. This MessageId allows to discriminate - /// already received message - pub message_id: MessageId, - // TODO maybe remove the Option of this field to make mandatory to have an - // id. - /// The peer that transmitted this message to us. It can be anonymous - pub source: Option, - /// The content of the data - pub data: Vec, - /// The topic from which we received the message - pub topic: TopicHash, -} - -impl From for IntentGossipEvent { - /// Transforme a GossipsubEvent into an IntentGossipEvent. This function - /// fails if the gossipsubEvent does not contain a GossipsubMessage. - fn from(event: GossipsubEvent) -> Self { - if let GossipsubEvent::Message { - propagation_source, - message_id, - message: - GossipsubMessage { - source, - data, - topic, - sequence_number: _, - }, - } = event - { - Self { - propagation_source, - message_id, - source, - data, - topic, - } - } else { - panic!("Expected a GossipsubEvent::Message got {:?}", event) - } - } -} - -impl TopicSubscriptionFilter for IntentGossipSubscriptionFilter { - /// tcheck that the proposed topic can be subscribed - fn can_subscribe(&mut self, topic_hash: &TopicHash) -> bool { - match self { - IntentGossipSubscriptionFilter::RegexFilter(filter) => { - filter.can_subscribe(topic_hash) - } - IntentGossipSubscriptionFilter::WhitelistFilter(filter) => { - filter.can_subscribe(topic_hash) - } - } - } -} - -/// [message_id] use the hash of the message data as an id -pub fn message_id(message: &GossipsubMessage) -> MessageId { - let mut hasher = DefaultHasher::new(); - message.data.hash(&mut hasher); - MessageId::from(hasher.finish().to_string()) -} - -impl Behaviour { - /// Create a new behaviour based on the config given - pub async fn new( - key: Keypair, - config: &config::IntentGossiper, - peer_intent_send: Sender, - ) -> Self { - let public_key = key.public(); - let peer_id = PeerId::from_public_key(public_key.clone()); - - // TODO remove hardcoded value and add them to the config Except - // validation_mode, protocol_id_prefix, message_id_fn and - // validate_messages - // Set a custom gossipsub for our use case - let gossipsub_config = gossipsub::GossipsubConfigBuilder::default() - .protocol_id_prefix("intent_gossip") - .heartbeat_interval(Duration::from_secs(1)) - .validation_mode(ValidationMode::Strict) - .message_id_fn(message_id) - .max_transmit_size(16 * 1024 * 1024) - .validate_messages() - .mesh_outbound_min(1) - // TODO bootstrap peers should not be part of the mesh, so all the - // `.mesh` args should be set to 0 https://github.com/libp2p/specs/blob/70d7fda47dda88d828b4db72775c1602de57e91b/pubsub/gossipsub/gossipsub-v1.1.md#recommendations-for-network-operators - .mesh_n_low(2) - .mesh_n(3) - .mesh_n_high(6) - .build() - .unwrap(); - - let filter = match &config.subscription_filter { - crate::config::SubscriptionFilter::RegexFilter(regex) => { - IntentGossipSubscriptionFilter::RegexFilter( - RegexSubscriptionFilter(regex.clone()), - ) - } - crate::config::SubscriptionFilter::WhitelistFilter(topics) => { - IntentGossipSubscriptionFilter::WhitelistFilter( - WhitelistSubscriptionFilter( - topics - .iter() - .map(IdentTopic::new) - .map(TopicHash::from) - .collect(), - ), - ) - } - }; - - let mut intent_gossip_behaviour: Gossipsub = - Gossipsub::new_with_subscription_filter( - MessageAuthenticity::Signed(key), - gossipsub_config, - filter, - ) - .unwrap(); 
- - // subscribe to all topic listed in the config. - config - .topics - .iter() - .try_for_each(|topic| { - intent_gossip_behaviour - .subscribe(&IdentTopic::new(topic)) - .map_err(Error::FailedSubscription) - // it returns bool signifying if it was already subscribed. - // discard because it can't be false as the config.topics is - // a hash set - .map(|_| ()) - }) - .expect("failed to subscribe to topic"); - - let discover_behaviour = { - // TODO: check that bootstrap_peers are in multiaddr (otherwise it - // fails silently) - let discover_config = - if let Some(discover_config) = &config.discover_peer { - DiscoveryConfigBuilder::default() - .with_user_defined(config.seed_peers.clone()) - .discovery_limit(discover_config.max_discovery_peers) - .with_kademlia(discover_config.kademlia) - .with_mdns(discover_config.mdns) - .use_kademlia_disjoint_query_paths(true) - .build() - .unwrap() - } else { - DiscoveryConfigBuilder::default().build().unwrap() - }; - DiscoveryBehaviour::new(peer_id, discover_config) - .await - .unwrap() - }; - Self { - intent_gossip_behaviour, - discover_behaviour, - identify: Identify::new(IdentifyConfig::new( - "anoma/id/anoma/id/1.0.0".into(), - public_key, - )), - ping: Ping::default(), - peer_intent_send, - } - } - - /// tries to apply a new intent. Fails if the logic fails or if the intent - /// is rejected. If the matchmaker fails the message is only ignore - fn handle_intent(&mut self, intent: Intent) -> MessageAcceptance { - if let Err(err) = self.peer_intent_send.try_send(intent) { - tracing::error!("Error sending intent to the matchmaker: {}", err); - // The buffer is full or the channel is closed - return MessageAcceptance::Ignore; - } - MessageAcceptance::Accept - } - - /// Tries to decoded the arbitrary data in an intent then call - /// [Self::handle_intent]. fails if the data does not contains an intent - fn handle_raw_intent( - &mut self, - data: impl AsRef<[u8]>, - ) -> MessageAcceptance { - match IntentGossipMessage::try_from(data.as_ref()) { - Ok(message) => self.handle_intent(message.intent), - Err(proto::Error::NoIntentError) => { - tracing::info!("Empty message, rejecting it"); - MessageAcceptance::Reject - } - Err(proto::Error::IntentDecodingError(err)) => { - tracing::info!("error while decoding the intent: {:?}", err); - MessageAcceptance::Reject - } - _ => unreachable!(), - } - } -} - -impl NetworkBehaviourEventProcess for Behaviour { - /// When a new event is generated by the intent gossip behaviour - fn inject_event(&mut self, event: GossipsubEvent) { - tracing::info!("received a new message : {:?}", event); - match event { - GossipsubEvent::Message { - message, - propagation_source, - message_id, - } => { - // validity is the type of response return to the network - // (valid|reject|ignore) - let validity = self.handle_raw_intent(message.data); - self.intent_gossip_behaviour - .report_message_validation_result( - &message_id, - &propagation_source, - validity, - ) - .expect("Failed to validate the message"); - } - // When a peer subscribe to a new topic, this node also tries to - // connect to it using the filter defined in the config - GossipsubEvent::Subscribed { peer_id: _, topic } => { - // try to subscribe to the new topic - self.intent_gossip_behaviour - .subscribe(&IdentTopic::new(topic.into_string())) - .map_err(Error::FailedSubscription) - .unwrap_or_else(|e| { - tracing::error!("failed to subscribe: {:?}", e); - false - }); - } - // Nothing to do when you are informed that a peer unsubscribed to a - // topic. 
- // TODO: It could be interesting to unsubscribe to a topic when the - // node is not connected to anyone else. - GossipsubEvent::Unsubscribed { - peer_id: _, - topic: _, - } => {} - } - } -} - -impl NetworkBehaviourEventProcess for Behaviour { - // The logic is part of the DiscoveryBehaviour, nothing to do here. - fn inject_event(&mut self, event: DiscoveryEvent) { - match event { - DiscoveryEvent::Connected(peer) => { - tracing::info!("Connect to a new peer: {:?}", peer) - } - DiscoveryEvent::Disconnected(peer) => { - tracing::info!("Peer disconnected: {:?}", peer) - } - _ => {} - } - } -} - -impl NetworkBehaviourEventProcess for Behaviour { - fn inject_event(&mut self, event: IdentifyEvent) { - match event { - IdentifyEvent::Received { peer_id, info } => { - tracing::info!("Identified Peer {}", peer_id); - tracing::debug!("protocol_version {}", info.protocol_version); - tracing::debug!("agent_version {}", info.agent_version); - tracing::debug!("listening_addresses {:?}", info.listen_addrs); - tracing::debug!("observed_address {}", info.observed_addr); - tracing::debug!("protocols {:?}", info.protocols); - if let Some(kad) = self.discover_behaviour.kademlia.as_mut() { - // Only the first address is the public IP, the others - // seem to be private - if let Some(addr) = info.listen_addrs.first() { - tracing::info!( - "Routing updated peer ID: {}, address: {}", - peer_id, - addr - ); - let _update = kad.add_address(&peer_id, addr.clone()); - } - } - } - IdentifyEvent::Sent { .. } => (), - IdentifyEvent::Pushed { .. } => (), - IdentifyEvent::Error { peer_id, error } => { - tracing::error!( - "Error while attempting to identify the remote peer {}: \ - {},", - peer_id, - error - ); - } - } - } -} - -impl NetworkBehaviourEventProcess for Behaviour { - fn inject_event(&mut self, event: PingEvent) { - match event.result { - Ok(PingSuccess::Ping { rtt }) => { - tracing::debug!( - "PingSuccess::Ping rtt to {} is {} ms", - event.peer.to_base58(), - rtt.as_millis() - ); - } - Ok(PingSuccess::Pong) => { - tracing::debug!( - "PingSuccess::Pong from {}", - event.peer.to_base58() - ); - } - Err(PingFailure::Timeout) => { - tracing::warn!( - "PingFailure::Timeout {}", - event.peer.to_base58() - ); - } - Err(PingFailure::Other { error }) => { - tracing::warn!( - "PingFailure::Other {}: {}", - event.peer.to_base58(), - error - ); - } - } - } -} diff --git a/apps/src/lib/node/gossip/p2p/identity.rs b/apps/src/lib/node/gossip/p2p/identity.rs deleted file mode 100644 index 42442054c8..0000000000 --- a/apps/src/lib/node/gossip/p2p/identity.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::fs::OpenOptions; -use std::path::{Path, PathBuf}; - -use libp2p::identity::ed25519::Keypair; -use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256}; - -use crate::cli; - -const P2P_KEY_PATH: &str = "gossiper-p2p-private-key.json"; - -/// ed255519 keypair + hash of public key. The keypair used to encrypted the -/// data send in the libp2p network. -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct Identity { - pub address: String, - #[serde(with = "keypair_serde")] - pub key: Keypair, -} - -// TODO this is needed because libp2p does not export ed255519 serde -// feature maybe a MR for libp2p to export theses functions ? 
-mod keypair_serde { - use libp2p::identity::ed25519::Keypair; - use serde::de::Error; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize( - value: &Keypair, - serializer: S, - ) -> Result - where - S: Serializer, - { - let bytes = value.encode(); - let string = hex::encode(&bytes[..]); - string.serialize(serializer) - } - pub fn deserialize<'d, D>(deserializer: D) -> Result - where - D: Deserializer<'d>, - { - let string = String::deserialize(deserializer)?; - let mut bytes = hex::decode(&string).map_err(Error::custom)?; - Keypair::decode(bytes.as_mut()).map_err(Error::custom) - } -} - -impl Identity { - /// Generates a new gossiper keypair and hash. - pub fn new() -> Self { - let key = Keypair::generate(); - let mut hasher = Sha256::new(); - hasher.update(key.public().encode()); - let address = format!("{:.40X}", hasher.finalize()); - Identity { address, key } - } - - /// Load identity from file or generate a new one if none found. - pub fn load_or_gen(base_dir: impl AsRef) -> Identity { - let file_path = Self::file_path(&base_dir); - match OpenOptions::new().read(true).open(&file_path) { - Ok(file) => { - let gossiper: Identity = serde_json::from_reader(file) - .expect("unexpected key encoding"); - gossiper - } - Err(err) => { - if let std::io::ErrorKind::NotFound = err.kind() { - tracing::info!( - "No P2P key found, generating a new one. This will be \ - written into {}", - file_path.to_string_lossy() - ); - Self::gen(base_dir) - } else { - eprintln!( - "Cannot read {}: {}", - file_path.to_string_lossy(), - err - ); - cli::safe_exit(1); - } - } - } - } - - /// Generate a new identity. - pub fn gen(base_dir: impl AsRef) -> Identity { - let file_path = Self::file_path(base_dir); - std::fs::create_dir_all(&file_path.parent().unwrap()).unwrap(); - let file = OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&file_path) - .expect("Couldn't open P2P key file"); - let gossiper = Identity::new(); - serde_json::to_writer_pretty(file, &gossiper) - .expect("Couldn't write private validator key file"); - gossiper - } - - pub fn file_path(base_dir: impl AsRef) -> PathBuf { - base_dir.as_ref().join(P2P_KEY_PATH) - } - - pub fn peer_id(&self) -> libp2p::PeerId { - let pk = self.key.public(); - let pk = libp2p::identity::PublicKey::Ed25519(pk); - libp2p::PeerId::from(pk) - } - - pub fn key(&self) -> libp2p::identity::Keypair { - libp2p::identity::Keypair::Ed25519(self.key.clone()) - } -} - -impl Default for Identity { - fn default() -> Self { - Self::new() - } -} diff --git a/apps/src/lib/node/gossip/p2p/mod.rs b/apps/src/lib/node/gossip/p2p/mod.rs deleted file mode 100644 index 2c135bc995..0000000000 --- a/apps/src/lib/node/gossip/p2p/mod.rs +++ /dev/null @@ -1,139 +0,0 @@ -pub mod behaviour; -mod identity; - -use std::path::Path; -use std::time::Duration; - -use behaviour::Behaviour; -use libp2p::core::connection::ConnectionLimits; -use libp2p::core::muxing::StreamMuxerBox; -use libp2p::core::transport::Boxed; -use libp2p::dns::DnsConfig; -use libp2p::identity::Keypair; -use libp2p::swarm::SwarmBuilder; -use libp2p::tcp::TcpConfig; -use libp2p::websocket::WsConfig; -use libp2p::{core, mplex, noise, PeerId, Transport, TransportError}; -use namada::proto::Intent; -use thiserror::Error; -use tokio::sync::mpsc::Sender; - -pub use self::identity::Identity; -use crate::config; - -pub type Swarm = libp2p::Swarm; - -#[derive(Error, Debug)] -pub enum Error { - #[error("Failed initializing the transport: {0}")] - Transport(std::io::Error), - 
#[error("Error with the network behavior: {0}")] - Behavior(crate::node::gossip::p2p::behaviour::Error), - #[error("Error while dialing: {0}")] - Dialing(libp2p::swarm::DialError), - #[error("Error while starting to listing: {0}")] - Listening(TransportError), - #[error("Error decoding peer identity")] - BadPeerIdentity(TransportError), -} -type Result = std::result::Result; - -pub struct P2P(pub Swarm); - -impl P2P { - /// Create a new peer based on the configuration given. Used transport is - /// tcp. A peer participate in the intent gossip system and helps the - /// propagation of intents. - pub async fn new( - config: &config::IntentGossiper, - base_dir: impl AsRef, - peer_intent_send: Sender, - ) -> Result { - let identity = Identity::load_or_gen(base_dir); - let peer_key = identity.key(); - // Id of the node on the libp2p network derived from the public key - let peer_id = identity.peer_id(); - - tracing::info!("Peer id: {:?}", peer_id.clone()); - - let transport = build_transport(&peer_key).await; - - // create intent gossip specific behaviour - let intent_gossip_behaviour = - Behaviour::new(peer_key, config, peer_intent_send).await; - - let connection_limits = build_p2p_connections_limit(); - - // Swarm is - let mut swarm = - SwarmBuilder::new(transport, intent_gossip_behaviour, peer_id) - .connection_limits(connection_limits) - .notify_handler_buffer_size( - std::num::NonZeroUsize::new(20).expect("Not zero"), - ) - .connection_event_buffer_size(64) - .build(); - - swarm - .listen_on(config.address.clone()) - .map_err(Error::Listening)?; - - Ok(Self(swarm)) - } -} - -// TODO explain a bit the choice made here -/// Create transport used by libp2p. See -/// for more information on libp2p -/// transport -pub async fn build_transport( - peer_key: &Keypair, -) -> Boxed<(PeerId, StreamMuxerBox)> { - let transport = { - let tcp_transport = TcpConfig::new().nodelay(true); - let dns_tcp_transport = DnsConfig::system(tcp_transport).await.unwrap(); - let ws_dns_tcp_transport = WsConfig::new(dns_tcp_transport.clone()); - dns_tcp_transport.or_transport(ws_dns_tcp_transport) - }; - - let auth_config = { - let dh_keys = noise::Keypair::::new() - .into_authentic(peer_key) - .expect("Noise key generation failed. Should never happen."); - - noise::NoiseConfig::xx(dh_keys).into_authenticated() - }; - - let mplex_config = { - let mut mplex_config = mplex::MplexConfig::new(); - mplex_config.set_max_buffer_behaviour(mplex::MaxBufferBehaviour::Block); - mplex_config.set_max_buffer_size(usize::MAX); - - let mut yamux_config = libp2p::yamux::YamuxConfig::default(); - yamux_config - .set_window_update_mode(libp2p::yamux::WindowUpdateMode::on_read()); - // TODO: check if its enought - yamux_config.set_max_buffer_size(16 * 1024 * 1024); - yamux_config.set_receive_window_size(16 * 1024 * 1024); - - core::upgrade::SelectUpgrade::new(yamux_config, mplex_config) - }; - - transport - .upgrade(core::upgrade::Version::V1) - .authenticate(auth_config) - .multiplex(mplex_config) - .timeout(Duration::from_secs(20)) - .boxed() -} - -// TODO document choice made here -// TODO inject it in the configuration instead of hard-coding it ? 
-pub fn build_p2p_connections_limit() -> ConnectionLimits { - ConnectionLimits::default() - .with_max_pending_incoming(Some(10)) - .with_max_pending_outgoing(Some(30)) - .with_max_established_incoming(Some(25)) - .with_max_established_outgoing(Some(25)) - .with_max_established_per_peer(Some(5)) -} diff --git a/apps/src/lib/node/gossip/rpc/client.rs b/apps/src/lib/node/gossip/rpc/client.rs deleted file mode 100644 index f33d6add34..0000000000 --- a/apps/src/lib/node/gossip/rpc/client.rs +++ /dev/null @@ -1,166 +0,0 @@ -use std::convert::TryFrom; -use std::net::SocketAddr; - -use libp2p::gossipsub::IdentTopic; -use namada::proto::{Intent, IntentGossipMessage}; -use tokio::sync::mpsc::{self, Sender}; -use tokio::sync::oneshot; -use tonic::transport::Server; -use tonic::{Request as TonicRequest, Response as TonicResponse, Status}; - -use crate::config::RpcServer; -use crate::node::gossip::p2p::behaviour::Gossipsub; -use crate::proto::services::rpc_service_server::{ - RpcService, RpcServiceServer, -}; -use crate::proto::services::{rpc_message, RpcMessage, RpcResponse}; -use crate::proto::{IntentMessage, SubscribeTopicMessage}; - -#[derive(Debug)] -struct Rpc { - inject_message: - mpsc::Sender<(rpc_message::Message, oneshot::Sender)>, -} - -#[tonic::async_trait] -impl RpcService for Rpc { - async fn send_message( - &self, - request: TonicRequest, - ) -> Result, Status> { - if let RpcMessage { message: Some(msg) } = request.into_inner() { - let (sender, receiver) = oneshot::channel(); - self.inject_message - .send((msg, sender)) - .await - .map_err(|err| - Status::cancelled(format!{"failed to send message to gossip app: {:?}",err}) - )? - ; - let response = receiver.await.map_err(|err| - Status::data_loss(format!{"failed to receive response from gossip app: {:?}", err}))?; - Ok(TonicResponse::new(response)) - } else { - tracing::error!("Received empty rpc message, nothing can be done"); - Ok(TonicResponse::new(RpcResponse::default())) - } - } -} - -pub async fn rpc_server( - addr: SocketAddr, - inject_message: Sender<( - rpc_message::Message, - oneshot::Sender, - )>, -) -> Result<(), tonic::transport::Error> { - let rpc = Rpc { inject_message }; - let svc = RpcServiceServer::new(rpc); - Server::builder().add_service(svc).serve(addr).await -} - -/// Start a rpc server in it's own thread. The used address to listen is in the -/// `config` argument. All received event by the rpc are send to the channel -/// return by this function. -pub async fn start_rpc_server( - config: &RpcServer, - rpc_sender: mpsc::Sender<( - rpc_message::Message, - tokio::sync::oneshot::Sender, - )>, -) { - let addr = config.address; - tracing::info!("RPC started at {}", config.address); - rpc_server(addr, rpc_sender).await.unwrap(); -} - -pub async fn handle_rpc_event( - event: rpc_message::Message, - gossip_sub: &mut Gossipsub, -) -> (RpcResponse, Option) { - match event { - rpc_message::Message::Intent(message) => { - match IntentMessage::try_from(message) { - Ok(message) => { - // Send the intent to gossip - let gossip_message = - IntentGossipMessage::new(message.intent.clone()); - let intent_bytes = gossip_message.to_bytes(); - - let gossip_result = match gossip_sub - .publish(IdentTopic::new(message.topic), intent_bytes) - { - Ok(message_id) => { - format!( - "Intent published in intent gossiper with \ - message ID: {}", - message_id - ) - } - Err(err) => { - format!( - "Failed to publish intent in gossiper: {:?}", - err - ) - } - }; - ( - RpcResponse { - result: format!( - "Intent received. 
{}.", - gossip_result, - ), - }, - Some(message.intent), - ) - } - Err(err) => ( - RpcResponse { - result: format!("Error decoding intent: {:?}", err), - }, - None, - ), - } - } - rpc_message::Message::Dkg(dkg_msg) => { - tracing::debug!("dkg not yet implemented {:?}", dkg_msg); - ( - RpcResponse { - result: String::from( - "DKG application not yet - implemented", - ), - }, - None, - ) - } - rpc_message::Message::Topic(topic_message) => { - let topic = SubscribeTopicMessage::from(topic_message); - let topic = IdentTopic::new(&topic.topic); - ( - match gossip_sub.subscribe(&topic) { - Ok(true) => { - let result = format!("Node subscribed to {}", topic); - tracing::info!("{}", result); - RpcResponse { result } - } - Ok(false) => { - let result = - format!("Node already subscribed to {}", topic); - tracing::info!("{}", result); - RpcResponse { result } - } - Err(err) => { - let result = format!( - "failed to subscribe to {}: {:?}", - topic, err - ); - tracing::error!("{}", result); - RpcResponse { result } - } - }, - None, - ) - } - } -} diff --git a/apps/src/lib/node/gossip/rpc/matchmakers.rs b/apps/src/lib/node/gossip/rpc/matchmakers.rs deleted file mode 100644 index c4912c8b96..0000000000 --- a/apps/src/lib/node/gossip/rpc/matchmakers.rs +++ /dev/null @@ -1,847 +0,0 @@ -//! This module provides connection between an intent gossiper node (the server) -//! and matchmakers (clients) over WebSocket. -//! -//! Both the server and the client can asynchronously listen for new messages -//! and send messages to the other side. - -use std::collections::HashSet; -use std::fmt::Debug; -use std::net::{SocketAddr, ToSocketAddrs}; -use std::sync::atomic::{self, AtomicBool}; -use std::sync::{Arc, RwLock}; - -use borsh::{BorshDeserialize, BorshSerialize}; -use derivative::Derivative; -use message_io::network::{Endpoint, ResourceId, ToRemoteAddr, Transport}; -use message_io::node::{self, NodeHandler, NodeListener}; - -use crate::cli; - -/// Message from intent gossiper to a matchmaker -#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)] -pub enum MsgFromServer { - /// Try to match an intent - AddIntent { id: Vec, data: Vec }, -} - -/// Message from a matchmaker to intent gossiper -#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)] -pub enum MsgFromClient { - /// The intent is invalid and hence it shouldn't be gossiped - InvalidIntent { id: Vec }, - /// The intent constraints are too complex for this matchmaker, gossip it - IntentConstraintsTooComplex { id: Vec }, - /// The matchmaker doesn't care about this intent, gossip it - IgnoredIntent { id: Vec }, - /// Intents were matched into a tx. Remove the matched intents from mempool - /// if the tx gets applied. - Matched { intent_ids: HashSet> }, - /// An intent was accepted and added, but no match found yet. Gossip it - Unmatched { id: Vec }, -} - -/// Intent gossiper server listener handles connections from [`ClientDialer`]s. -#[derive(Derivative)] -#[derivative(Debug)] -pub struct ServerListener { - /// The address on which the server is listening - pub address: SocketAddr, - /// The accepted client connections, shared with the [`ServerDialer`] - clients: Arc>>, - /// A node listener and its abort receiver. These are consumed once the - /// listener is started with [`ServerListener::listen`]. - #[derivative(Debug = "ignore")] - listener: Option<(NodeListener<()>, tokio::sync::mpsc::Receiver<()>)>, -} - -/// Intent gossiper server dialer can send messages to the connected -/// [`ClientListener`]s. 
-#[derive(Clone, Derivative)] -#[derivative(Debug)] -pub struct ServerDialer { - /// The connection handler - #[derivative(Debug = "ignore")] - handler: NodeHandler<()>, - /// Connection resource ID - resource_id: ResourceId, - /// The accepted client connections, shared with the [`ServerListener`] - clients: Arc>>, - /// A message to abort the server must be sent to stop the - /// [`ServerListener`]. This message will be sent on [`ServerDialer`]'s - /// `drop` call. - abort_send: tokio::sync::mpsc::Sender<()>, -} - -/// Server events are used internally by the async [`ServerListener`]. -#[derive(Clone, Debug)] -enum ServerEvent { - /// New endpoint has been accepted by a listener and considered ready to - /// use. The event contains the resource id of the listener that - /// accepted this connection. - Accepted(Endpoint, ResourceId), - /// Input message received by the network. - Message(Endpoint, MsgFromClient), - /// This event is only dispatched when a connection is lost. - Disconnected(Endpoint), -} - -/// Matchmaker client listener handles a connection from [`ServerDialer`]. -#[derive(Derivative)] -#[derivative(Debug)] -pub struct ClientListener { - /// The connection handler - #[derivative(Debug = "ignore")] - handler: NodeHandler<()>, - /// The server connection endpoint - server: Endpoint, - /// The address on which the client is listening - local_addr: SocketAddr, - /// The client listener. This is consumed once the listener is started with - /// [`ClientListener::listen`]. - #[derivative(Debug = "ignore")] - listener: Option>, - /// Server connection status - is_connected: Arc, -} - -/// Matchmaker client dialer can send messages to the connected -/// [`ServerListener`]. -#[derive(Clone, Derivative)] -#[derivative(Debug)] -pub struct ClientDialer { - /// The address on which the client is listening - pub local_addr: SocketAddr, - /// The server address - server: Endpoint, - /// The connection handler - #[derivative(Debug = "ignore")] - handler: NodeHandler<()>, - /// Server connection status - is_connected: Arc, -} - -impl ServerListener { - /// Create a new intent gossiper node server. Returns a listener and - /// a dialer that can be used to send messages to clients and to shut down - /// the server. - pub fn new_pair(address: impl ToSocketAddrs) -> (Self, ServerDialer) { - let clients: Arc>> = Default::default(); - let (handler, listener) = node::split::<()>(); - - let (resource_id, address) = match handler - .network() - .listen(Transport::Ws, &address) - { - Ok((resource_id, real_addr)) => { - tracing::info!("Matchmakers server running at {}", real_addr); - (resource_id, real_addr) - } - Err(err) => { - eprintln!( - "The matchmakers server cannot listen at {:?}: {}", - address.to_socket_addrs().unwrap().collect::>(), - err - ); - cli::safe_exit(1); - } - }; - - let (abort_send, abort_recv) = tokio::sync::mpsc::channel::<()>(1); - - ( - Self { - address, - clients: clients.clone(), - listener: Some((listener, abort_recv)), - }, - ServerDialer { - handler, - clients, - resource_id, - abort_send, - }, - ) - } - - /// Start the server listener and call `on_msg` on every received message. - /// The listener can be stopped early by [`ServerDialer::shutdown`]. 
- pub async fn listen(mut self, mut on_msg: impl FnMut(MsgFromClient)) { - // Open a channel for events received from the async listener - let (send, mut recv) = tokio::sync::mpsc::unbounded_channel(); - - // This is safe because `listen` consumes `self` created by - // [`ServerListener::new_pair`] - let (listener, mut abort_recv) = self.listener.take().unwrap(); - - tracing::debug!("Starting intent gossiper matchmakers server..."); - - // Start the async listener that will send server events over the - // channel - let _task = listener.for_each_async(move |event| { - match event.network() { - message_io::network::NetEvent::Message( - endpoint, - mut msg_bytes, - ) => match MsgFromClient::deserialize(&mut msg_bytes) { - Ok(msg) => { - let _ = send.send(ServerEvent::Message(endpoint, msg)); - } - Err(err) => { - tracing::error!( - "Couldn't decode a msg from matchmaker {}: {}", - endpoint, - err - ); - } - }, - message_io::network::NetEvent::Accepted(endpoint, id) => { - tracing::info!( - "Accepted connection from matchmaker {}", - endpoint - ); - let _ = send.send(ServerEvent::Accepted(endpoint, id)); - } - message_io::network::NetEvent::Disconnected(endpoint) => { - tracing::info!("Matchmaker disconnected: {}", endpoint); - let _ = send.send(ServerEvent::Disconnected(endpoint)); - } - message_io::network::NetEvent::Connected(endpoint, status) => { - // Server only gets `NetEvent::Accepted` from connected - // clients - tracing::error!( - "Unexpected server `NetEvent::Connected` with \ - endpoint {}, status {}", - endpoint, - status - ); - } - } - }); - - tracing::debug!("Intent gossiper matchmakers server is ready."); - - // Process the server events - loop { - tokio::select! { - _ = abort_recv.recv() => { - tracing::debug!("Shutting down intent gossiper matchmakers server."); - return; - }, - event = recv.recv() => if let Some(event) = event { - match event { - ServerEvent::Message(endpoint, msg) => { - tracing::debug!( - "Received msg from matchmaker {}: {:?}", - endpoint, - msg - ); - on_msg(msg); - } - ServerEvent::Accepted(endpoint, _id) => { - let mut clients = self.clients.write().unwrap(); - if !clients.insert(endpoint) { - tracing::warn!( - "Accepted matchmaker already known {}", - endpoint - ) - } - } - ServerEvent::Disconnected(endpoint) => { - let mut clients = self.clients.write().unwrap(); - if !clients.remove(&endpoint) { - tracing::warn!( - "Disconnected matchmaker unknown endpoint {}", - endpoint - ) - } - } - } - } - } - } - } -} - -impl ServerDialer { - /// Broadcast a message to all connected matchmaker clients - pub fn send(&mut self, msg: MsgFromServer) { - let net = self.handler.network(); - for client in self.clients.read().unwrap().iter() { - let msg_bytes = msg.try_to_vec().unwrap(); - let status = net.send(*client, &msg_bytes); - tracing::info!( - "Sent msg {:?} to {} with status {:?}", - msg, - client, - status - ); - } - } - - /// Is the server listener ready to start handling incoming connections? - pub fn is_ready(&self) -> bool { - self.handler - .network() - .is_ready(self.resource_id) - .unwrap_or_default() - } - - /// Force shut-down the [`ServerListener`] associated with this dialer. - pub fn shutdown(&mut self) { - self.handler.stop(); - // Send a message to abort and ignore the result - let _ = self.abort_send.blocking_send(()); - } -} - -impl ClientListener { - /// Create a new matchmaker client. Returns a listener and a dialer that - /// can be used to send messages to the server and to shut down the client. 
- pub fn new_pair(server_addr: impl ToRemoteAddr) -> (Self, ClientDialer) { - let server_addr = server_addr.to_remote_addr().unwrap(); - // Not using message-io signals - let (handler, listener) = node::split::<()>(); - - let (server, local_addr) = match handler - .network() - .connect(Transport::Ws, server_addr.clone()) - { - Ok(res) => res, - Err(err) => { - eprintln!( - "Cannot listen at {} for matchmakers server: {}", - server_addr, err, - ); - cli::safe_exit(1); - } - }; - tracing::info!("Matchmaker client running at {}", local_addr); - - let is_connected = Arc::new(AtomicBool::new(false)); - - ( - Self { - server, - local_addr, - listener: Some(listener), - is_connected: is_connected.clone(), - handler: handler.clone(), - }, - ClientDialer { - server, - local_addr, - handler, - is_connected, - }, - ) - } - - /// Start the client listener and call `on_msg` on every received message. - /// The listener can be stopped early by [`ClientDialer::shutdown`]. - pub fn listen(mut self, mut on_msg: impl FnMut(MsgFromServer)) { - // This is safe because `listen` consumes `self` - let listener = self.listener.take().unwrap(); - - // Start the blocking listener that will call `on_msg` on every message - let server_addr = self.server.addr(); - let local_addr_port = self.local_addr.port(); - - tracing::debug!("Matchmakers client is ready."); - - listener.for_each(move |event| { - tracing::debug!("Client event {:#?}", event); - match event { - node::NodeEvent::Network(net_event) => match net_event { - message_io::network::NetEvent::Message( - endpoint, - mut msg_bytes, - ) => match MsgFromServer::deserialize(&mut msg_bytes) { - Ok(msg) => { - on_msg(msg); - } - Err(err) => { - tracing::error!( - "Couldn't decode a msg from intent gossiper \ - {}: {}", - endpoint, - err - ); - } - }, - message_io::network::NetEvent::Connected( - _endpoint, - established, - ) => { - if established { - tracing::info!( - "Connected to the server at {}. The client is \ - identified by local port: {}", - server_addr, - local_addr_port - ); - } else { - tracing::error!( - "Cannot connect to the server at {}", - server_addr - ) - } - self.is_connected - .store(established, atomic::Ordering::SeqCst); - } - message_io::network::NetEvent::Disconnected(endpoint) => { - tracing::info!("Disconnected from {}", endpoint); - self.is_connected - .store(false, atomic::Ordering::SeqCst); - // Exit on disconnect, a user of this client can - // implement retry logic - self.handler.stop(); - } - message_io::network::NetEvent::Accepted(endpoint, _) => { - // Client only gets `NetEvent::Connected` from connected - // clients - tracing::error!( - "Unexpected client `NetEvent::Accepted` with \ - endpoint {}", - endpoint - ); - } - }, - node::NodeEvent::Signal(()) => { - // unused - } - } - }); - - tracing::debug!("Matchmakers client is shutting down."); - } -} - -impl ClientDialer { - /// Send a message to the intent gossiper server - pub fn send(&mut self, msg: MsgFromClient) { - let net = self.handler.network(); - let msg_bytes = msg.try_to_vec().unwrap(); - let status = net.send(self.server, &msg_bytes); - tracing::info!( - "Sent msg {:?} to {} with status {:?}", - msg, - self.server, - status - ); - } - - /// Is the client connected? - pub fn is_connected(&self) -> bool { - self.is_connected.load(atomic::Ordering::SeqCst) - } - - /// Force shut-down the [`ClientListener`] associated with this dialer. 
- pub fn shutdown(&mut self) { - self.handler.stop(); - } -} - -impl Drop for ServerDialer { - fn drop(&mut self) { - self.shutdown(); - } -} - -impl Drop for ClientDialer { - fn drop(&mut self) { - self.shutdown(); - } -} - -#[cfg(test)] -mod test { - use std::collections::HashMap; - use std::sync::atomic; - - use itertools::Itertools; - use proptest::prelude::*; - use proptest::prop_state_machine; - use proptest::state_machine::{AbstractStateMachine, StateMachineTest}; - use proptest::test_runner::Config; - use test_log::test; - - use super::*; - - prop_state_machine! { - #![proptest_config(Config { - // Instead of the default 256, we only run 10 because otherwise it - // takes too long - cases: 10, - // 10 second timeout - timeout: 10_000, - .. Config::default() - })] - #[test] - /// A `StateMachineTest` implemented on `AbstractState` - fn connections_state_machine_test(sequential 1..20 => AbstractState); - } - - /// Abstract representation of a state of a server and client(s) - #[derive(Clone, Debug)] - struct AbstractState { - // true == running - server: bool, - clients: HashSet, - } - - /// State of a concrete server and client(s) implementation - #[derive(Default)] - struct ConcreteState { - server: Option, - clients: HashMap, - } - - /// State machine transitions - #[derive(Clone, Debug)] - enum Transition { - StartServer, - StopServer, - StartClient(ClientId), - StopClient(ClientId), - ServerMsg(MsgFromServer), - ClientMsg(ClientId, MsgFromClient), - } - - type ClientId = usize; - - struct TestServer { - /// The address of the server (assigned dynamically) - address: SocketAddr, - /// Runtime for the async listener - rt: tokio::runtime::Runtime, - #[allow(dead_code)] - /// Task that runs the async server listener - listener_handle: tokio::task::JoinHandle<()>, - /// A server dialer can send messages to clients - dialer: ServerDialer, - /// Messages received by the `listener` from clients are forwarded - /// to this receiver, to be checked by the test. - msgs_recv: std::sync::mpsc::Receiver, - } - - struct TestClient { - /// A client dialer can send messages to the server - dialer: ClientDialer, - /// A thread that runs the client listener - listener_handle: std::thread::JoinHandle<()>, - /// Messages received by the `listener` from the server are forwarded - /// to this receiver, to be checked by the test. 
- msgs_recv: std::sync::mpsc::Receiver, - } - - impl StateMachineTest for AbstractState { - type Abstract = Self; - type ConcreteState = ConcreteState; - - fn init_test( - _initial_state: ::State, - ) -> Self::ConcreteState { - ConcreteState::default() - } - - fn apply_concrete( - mut state: Self::ConcreteState, - transition: ::Transition, - ) -> Self::ConcreteState { - match transition { - Transition::StartServer => { - // Assign port dynamically - let (listener, dialer) = - ServerListener::new_pair("127.0.0.1:0"); - let address = listener.address; - let (msgs_send, msgs_recv) = std::sync::mpsc::channel(); - // Run the listener, we need an async runtime - let rt = tokio::runtime::Runtime::new().unwrap(); - let listener_handle = rt.spawn(async move { - listener - .listen(move |msg| { - msgs_send.send(msg).unwrap(); - }) - .await; - }); - - // Wait for the server to be ready - while !dialer.is_ready() { - println!("Waiting for the server to be ready"); - } - - state.server = Some(TestServer { - address, - rt, - dialer, - listener_handle, - msgs_recv, - }) - } - Transition::StopServer => { - // For the server, we have to send abort signal and drop - // the dialer - let mut server = state.server.take().unwrap(); - server.dialer.shutdown(); - server - .rt - .shutdown_timeout(std::time::Duration::from_secs(2)); - drop(server.dialer); - - if !state.clients.is_empty() { - println!( - "The server is waiting for all the clients to \ - stop..." - ); - while state.clients.values().any(|client| { - client - .dialer - .is_connected - .load(atomic::Ordering::SeqCst) - }) {} - // Stop the clients - for (id, client) in - std::mem::take(&mut state.clients).into_iter() - { - // Ask the client to stop - client.dialer.handler.stop(); - println!("Asking client {} listener to stop", id); - // Wait for it to actually stop - client.listener_handle.join().unwrap(); - println!("Client {} listener stopped", id); - } - println!("Clients stopped"); - } - } - Transition::StartClient(id) => { - let server_addr = state.server.as_ref().unwrap().address; - let (listener, dialer) = - ClientListener::new_pair(server_addr); - let (msgs_send, msgs_recv) = std::sync::mpsc::channel(); - let listener_handle = std::thread::spawn(move || { - listener.listen(|msg| { - msgs_send.send(msg).unwrap(); - }) - }); - - // If there is a server running ... - if let Some(server) = state.server.as_ref() { - // ... wait for the client to connect ... - while !dialer.is_connected() {} - // ... and for the server to accept it - while !server.dialer.clients.read().unwrap().iter().any( - |client| { - // Client's address is added once it's accepted - client.addr() == dialer.local_addr - }, - ) {} - } - - state.clients.insert( - id, - TestClient { - dialer, - listener_handle, - msgs_recv, - }, - ); - } - Transition::StopClient(id) => { - // Remove the client - let client = state.clients.remove(&id).unwrap(); - // Ask the client to stop - client.dialer.handler.stop(); - // Wait for it to actually stop - client.listener_handle.join().unwrap(); - } - Transition::ServerMsg(msg) => { - state.server.as_mut().unwrap().dialer.send(msg.clone()); - - // Post-condition: every client must receive the msg - for client in state.clients.values() { - let recv_msg = client.msgs_recv.recv().unwrap(); - assert_eq!(msg, recv_msg); - } - } - Transition::ClientMsg(id, msg) => { - let client = state.clients.get_mut(&id).unwrap(); - client.dialer.send(msg.clone()); - - // Post-condition: - // If there is a server running ... 
- if let Some(server) = state.server.as_mut() { - // ... it must receive the msg - let recv_msg = server.msgs_recv.recv().unwrap(); - assert_eq!(msg, recv_msg); - } - } - } - state - } - - fn test_sequential( - initial_state: ::State, - transitions: Vec< - ::Transition, - >, - ) { - let mut state = Self::init_test(initial_state); - for transition in transitions { - state = Self::apply_concrete(state, transition); - Self::invariants(&state); - } - - // Shutdown the server gracefully - if let Some(mut server) = state.server { - server.dialer.shutdown(); - server - .rt - .shutdown_timeout(std::time::Duration::from_secs(4)); - } - // Shutdown any clients too - if !state.clients.is_empty() { - println!( - "The server is waiting for all the clients to stop..." - ); - while state.clients.values().any(|client| { - client.dialer.is_connected.load(atomic::Ordering::SeqCst) - }) {} - println!("Clients stopped"); - } - } - } - - impl AbstractStateMachine for AbstractState { - type State = Self; - type Transition = Transition; - - fn init_state() -> BoxedStrategy { - Just(Self { - server: false, - clients: HashSet::default(), - }) - .boxed() - } - - fn transitions(state: &Self::State) -> BoxedStrategy { - use Transition::*; - if state.clients.is_empty() { - prop_oneof![ - Just(StartServer), - Just(StopServer), - (0..4_usize).prop_map(StartClient), - arb_msg_from_server().prop_map(ServerMsg), - ] - .boxed() - } else { - let ids: Vec<_> = - state.clients.iter().sorted().cloned().collect(); - let arb_id = proptest::sample::select(ids); - prop_oneof![ - Just(StartServer), - Just(StopServer), - (0..4_usize).prop_map(StartClient), - arb_msg_from_server().prop_map(ServerMsg), - arb_id.clone().prop_map(StopClient), - arb_id.prop_flat_map(|id| arb_msg_from_client() - .prop_map(move |msg| { ClientMsg(id, msg) })), - ] - .boxed() - } - } - - fn preconditions( - state: &Self::State, - transition: &Self::Transition, - ) -> bool { - match transition { - Transition::StartServer => !state.server, - Transition::StopServer => state.server, - Transition::StartClient(id) => { - // only start clients if the server is running and this - // client ID is not running - state.server && !state.clients.contains(id) - } - Transition::StopClient(id) => { - // stop only if this client is running - state.clients.contains(id) - } - Transition::ServerMsg(_) => { - // can send only if the server is running - state.server - } - Transition::ClientMsg(id, _) => { - // can send only if the server and this client is running - state.server && state.clients.contains(id) - } - } - } - - fn apply_abstract( - mut state: Self::State, - transition: &Self::Transition, - ) -> Self::State { - match transition { - Transition::StartServer => { - state.server = true; - } - Transition::StopServer => { - state.server = false; - // Clients should disconnect and stop - state.clients = Default::default(); - } - Transition::StartClient(id) => { - state.clients.insert(*id); - } - Transition::StopClient(id) => { - state.clients.remove(id); - } - Transition::ServerMsg(_msg) => { - // no change - } - Transition::ClientMsg(_id, _msg) => { - // no change - } - } - state - } - } - - prop_compose! 
{ - /// Generate an arbitrary MsgFromServer - fn arb_msg_from_server() - (id in proptest::collection::vec(any::(), 1..100), - data in proptest::collection::vec(any::(), 1..100)) - -> MsgFromServer { - MsgFromServer::AddIntent { id, data } - } - } - - /// Generate an arbitrary MsgFromClient - fn arb_msg_from_client() -> impl Strategy { - let arb_intent_id = proptest::collection::vec(any::(), 1..100); - let invalid_intent = arb_intent_id - .clone() - .prop_map(|id| MsgFromClient::InvalidIntent { id }); - let intent_too_complex = arb_intent_id - .clone() - .prop_map(|id| MsgFromClient::IntentConstraintsTooComplex { id }); - let ignored_intent = arb_intent_id - .clone() - .prop_map(|id| MsgFromClient::IgnoredIntent { id }); - let unmatched_intent = arb_intent_id - .clone() - .prop_map(|id| MsgFromClient::Unmatched { id }); - let matched_intent = - proptest::collection::hash_set(arb_intent_id, 1..10).prop_map( - move |intent_ids| MsgFromClient::Matched { intent_ids }, - ); - prop_oneof![ - invalid_intent, - intent_too_complex, - ignored_intent, - matched_intent, - unmatched_intent, - ] - } -} diff --git a/apps/src/lib/node/gossip/rpc/mod.rs b/apps/src/lib/node/gossip/rpc/mod.rs deleted file mode 100644 index 541f5b1eb7..0000000000 --- a/apps/src/lib/node/gossip/rpc/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod client; -pub mod matchmakers; diff --git a/apps/src/lib/node/ledger/protocol/mod.rs b/apps/src/lib/node/ledger/protocol/mod.rs index 20ed9f0c9e..e7b85b0f6c 100644 --- a/apps/src/lib/node/ledger/protocol/mod.rs +++ b/apps/src/lib/node/ledger/protocol/mod.rs @@ -12,10 +12,10 @@ use namada::ledger::ibc::vp::{Ibc, IbcToken}; use namada::ledger::native_vp::{self, NativeVp}; use namada::ledger::parameters::{self, ParametersVp}; use namada::ledger::pos::{self, PosVP}; +use namada::ledger::slash_fund::SlashFundVp; use namada::ledger::storage::traits::StorageHasher; use namada::ledger::storage::write_log::WriteLog; use namada::ledger::storage::{DBIter, Storage, DB}; -use namada::ledger::treasury::TreasuryVp; use namada::proto::{self, Tx}; use namada::types::address::{Address, InternalAddress}; use namada::types::storage; @@ -60,8 +60,8 @@ pub enum Error { IbcTokenNativeVpError(namada::ledger::ibc::vp::IbcTokenError), #[error("Governance native VP error: {0}")] GovernanceNativeVpError(namada::ledger::governance::vp::Error), - #[error("Treasury native VP error: {0}")] - TreasuryNativeVpError(namada::ledger::treasury::Error), + #[error("SlashFund native VP error: {0}")] + SlashFundNativeVpError(namada::ledger::slash_fund::Error), #[error("Ethereum bridge native VP error: {0}")] EthBridgeNativeVpError(namada::ledger::eth_bridge::vp::Error), #[error("Ethereum bridge pool native VP error: {0}")] @@ -384,10 +384,13 @@ where } Address::Internal(internal_addr) => { let ctx = native_vp::Ctx::new( + addr, storage, write_log, tx, gas_meter, + &keys_changed, + &verifiers, vp_wasm_cache.clone(), ); let tx_data = match tx.data.as_ref() { @@ -460,12 +463,12 @@ where gas_meter = governance.ctx.gas_meter.into_inner(); result } - InternalAddress::Treasury => { - let treasury = TreasuryVp { ctx }; - let result = treasury + InternalAddress::SlashFund => { + let slash_fund = SlashFundVp { ctx }; + let result = slash_fund .validate_tx(tx_data, &keys_changed, &verifiers) - .map_err(Error::TreasuryNativeVpError); - gas_meter = treasury.ctx.gas_meter.into_inner(); + .map_err(Error::SlashFundNativeVpError); + gas_meter = slash_fund.ctx.gas_meter.into_inner(); result } InternalAddress::IbcEscrow(_) diff --git 
a/apps/src/lib/node/ledger/shell/finalize_block.rs b/apps/src/lib/node/ledger/shell/finalize_block.rs index 70c2adbd77..c5119f5cf1 100644 --- a/apps/src/lib/node/ledger/shell/finalize_block.rs +++ b/apps/src/lib/node/ledger/shell/finalize_block.rs @@ -1,22 +1,12 @@ //! Implementation of the `FinalizeBlock` ABCI++ method for the Shell -use namada::ledger::governance::storage as gov_storage; -use namada::ledger::governance::utils::{ - compute_tally, get_proposal_votes, ProposalEvent, -}; -use namada::ledger::governance::vp::ADDRESS as gov_address; -use namada::ledger::storage::types::encode; -use namada::ledger::treasury::ADDRESS as treasury_address; -use namada::types::address::{xan as m1t, Address}; -use namada::types::governance::TallyResult; -use namada::types::storage::{BlockHash, Epoch, Header}; +use namada::types::storage::{BlockHash, Header}; use namada::types::transaction::protocol::ProtocolTxType; -use super::queries::QueriesExt; +use super::governance::execute_governance_proposals; use super::*; use crate::facade::tendermint_proto::abci::Misbehavior as Evidence; use crate::facade::tendermint_proto::crypto::PublicKey as TendermintPublicKey; -use crate::node::ledger::events::EventType; impl Shell where @@ -56,166 +46,8 @@ where self.update_state(req.header, req.hash, req.byzantine_validators); if new_epoch { - for id in std::mem::take(&mut self.proposal_data) { - let proposal_funds_key = gov_storage::get_funds_key(id); - let proposal_start_epoch_key = - gov_storage::get_voting_start_epoch_key(id); - - let funds = self - .read_storage_key::(&proposal_funds_key) - .ok_or_else(|| { - Error::BadProposal( - id, - "Invalid proposal funds.".to_string(), - ) - })?; - let proposal_start_epoch = self - .read_storage_key::(&proposal_start_epoch_key) - .ok_or_else(|| { - Error::BadProposal( - id, - "Invalid proposal start_epoch.".to_string(), - ) - })?; - - let votes = - get_proposal_votes(&self.storage, proposal_start_epoch, id); - let tally_result = - compute_tally(&self.storage, proposal_start_epoch, votes); - - let transfer_address = match tally_result { - TallyResult::Passed => { - let proposal_author_key = - gov_storage::get_author_key(id); - let proposal_author = self - .read_storage_key::
(&proposal_author_key) - .ok_or_else(|| { - Error::BadProposal( - id, - "Invalid proposal author.".to_string(), - ) - })?; - - let proposal_code_key = - gov_storage::get_proposal_code_key(id); - let proposal_code = - self.read_storage_key_bytes(&proposal_code_key); - match proposal_code { - Some(proposal_code) => { - let tx = - Tx::new(proposal_code, Some(encode(&id))); - let pending_execution_key = - gov_storage::get_proposal_execution_key(id); - self.storage - .write(&pending_execution_key, "") - .expect( - "Should be able to write to storage.", - ); - let tx_result = protocol::apply_wasm_tx( - tx, - 0, /* this is used to compute the fee - * based on the code size. We dont - * need it here. */ - self.into(), - ); - self.storage - .delete(&pending_execution_key) - .expect( - "Should be able to delete the storage.", - ); - match tx_result { - Ok(tx_result) => { - if tx_result.is_accepted() { - self.write_log.commit_tx(); - let proposal_event: Event = - ProposalEvent::new( - EventType::Proposal - .to_string(), - TallyResult::Passed, - id, - true, - true, - ) - .into(); - response - .events - .push(proposal_event); - - proposal_author - } else { - self.write_log.drop_tx(); - let proposal_event: Event = - ProposalEvent::new( - EventType::Proposal - .to_string(), - TallyResult::Passed, - id, - true, - false, - ) - .into(); - response - .events - .push(proposal_event); - - treasury_address - } - } - Err(_e) => { - self.write_log.drop_tx(); - let proposal_event: Event = - ProposalEvent::new( - EventType::Proposal.to_string(), - TallyResult::Passed, - id, - true, - false, - ) - .into(); - response.events.push(proposal_event); - - treasury_address - } - } - } - None => { - let proposal_event: Event = ProposalEvent::new( - EventType::Proposal.to_string(), - TallyResult::Passed, - id, - false, - false, - ) - .into(); - response.events.push(proposal_event); - - proposal_author - } - } - } - TallyResult::Rejected | TallyResult::Unknown => { - let proposal_event: Event = ProposalEvent::new( - EventType::Proposal.to_string(), - TallyResult::Rejected, - id, - false, - false, - ) - .into(); - response.events.push(proposal_event); - - treasury_address - } - }; - - // transfer proposal locked funds - self.storage.transfer( - &m1t(), - funds, - &gov_address, - &transfer_address, - ); - } + let _proposals_result = + execute_governance_proposals(self, &mut response)?; } for processed_tx in &req.txs { @@ -501,20 +333,6 @@ where let update = ValidatorUpdate { pub_key, power }; response.validator_updates.push(update); }); - - // Update evidence parameters - let (epoch_duration, _gas) = - parameters::read_epoch_parameter(&self.storage) - .expect("Couldn't read epoch duration parameters"); - let pos_params = self.storage.read_pos_params(); - let evidence_params = self - .storage - .get_evidence_params(&epoch_duration, &pos_params); - - response.consensus_param_updates = Some(ConsensusParams { - evidence: Some(evidence_params), - ..response.consensus_param_updates.take().unwrap_or_default() - }); } } @@ -907,7 +725,7 @@ mod test_finalize_block { let signed = MultiSignedEthEvent { event, #[cfg(feature = "abcipp")] - signers: HashSet::from([address.clone()]), + signers: BTreeSet::from([address.clone()]), #[cfg(not(feature = "abcipp"))] signers: BTreeSet::from([( address.clone(), diff --git a/apps/src/lib/node/ledger/shell/governance.rs b/apps/src/lib/node/ledger/shell/governance.rs new file mode 100644 index 0000000000..f5e9505909 --- /dev/null +++ b/apps/src/lib/node/ledger/shell/governance.rs @@ -0,0 +1,186 @@ +use 
namada::ledger::governance::storage as gov_storage; +use namada::ledger::governance::utils::{ + compute_tally, get_proposal_votes, ProposalEvent, +}; +use namada::ledger::governance::vp::ADDRESS as gov_address; +use namada::ledger::slash_fund::ADDRESS as slash_fund_address; +use namada::ledger::storage::types::encode; +use namada::ledger::storage::{DBIter, DB}; +use namada::types::address::{xan as m1t, Address}; +use namada::types::governance::TallyResult; +use namada::types::storage::Epoch; +use namada::types::token; + +use super::*; +use crate::node::ledger::events::EventType; + +#[derive(Default)] +pub struct ProposalsResult { + passed: Vec, + rejected: Vec, +} + +pub fn execute_governance_proposals( + shell: &mut Shell, + response: &mut shim::response::FinalizeBlock, +) -> Result +where + D: DB + for<'iter> DBIter<'iter> + Sync + 'static, + H: StorageHasher + Sync + 'static, +{ + let mut proposals_result = ProposalsResult::default(); + + for id in std::mem::take(&mut shell.proposal_data) { + let proposal_funds_key = gov_storage::get_funds_key(id); + let proposal_end_epoch_key = gov_storage::get_voting_end_epoch_key(id); + + let funds = shell + .read_storage_key::(&proposal_funds_key) + .ok_or_else(|| { + Error::BadProposal(id, "Invalid proposal funds.".to_string()) + })?; + let proposal_end_epoch = shell + .read_storage_key::(&proposal_end_epoch_key) + .ok_or_else(|| { + Error::BadProposal( + id, + "Invalid proposal end_epoch.".to_string(), + ) + })?; + + let votes = get_proposal_votes(&shell.storage, proposal_end_epoch, id); + let tally_result = + compute_tally(&shell.storage, proposal_end_epoch, votes); + + let transfer_address = match tally_result { + TallyResult::Passed => { + let proposal_author_key = gov_storage::get_author_key(id); + let proposal_author = shell + .read_storage_key::
(&proposal_author_key) + .ok_or_else(|| { + Error::BadProposal( + id, + "Invalid proposal author.".to_string(), + ) + })?; + + let proposal_code_key = gov_storage::get_proposal_code_key(id); + let proposal_code = + shell.read_storage_key_bytes(&proposal_code_key); + match proposal_code { + Some(proposal_code) => { + let tx = Tx::new(proposal_code, Some(encode(&id))); + let tx_type = + TxType::Decrypted(DecryptedTx::Decrypted(tx)); + let pending_execution_key = + gov_storage::get_proposal_execution_key(id); + shell + .storage + .write(&pending_execution_key, "") + .expect("Should be able to write to storage."); + let tx_result = protocol::dispatch_tx( + tx_type, + 0, /* this is used to compute the fee + * based on the code size. We dont + * need it here. */ + &mut BlockGasMeter::default(), + &mut shell.write_log, + &mut shell.storage, + &mut shell.vp_wasm_cache, + &mut shell.tx_wasm_cache, + ); + shell + .storage + .delete(&pending_execution_key) + .expect("Should be able to delete the storage."); + match tx_result { + Ok(tx_result) => { + if tx_result.is_accepted() { + shell.write_log.commit_tx(); + let proposal_event: Event = + ProposalEvent::new( + EventType::Proposal.to_string(), + TallyResult::Passed, + id, + true, + true, + ) + .into(); + response.events.push(proposal_event); + proposals_result.passed.push(id); + + proposal_author + } else { + shell.write_log.drop_tx(); + let proposal_event: Event = + ProposalEvent::new( + EventType::Proposal.to_string(), + TallyResult::Passed, + id, + true, + false, + ) + .into(); + response.events.push(proposal_event); + proposals_result.rejected.push(id); + + slash_fund_address + } + } + Err(_e) => { + shell.write_log.drop_tx(); + let proposal_event: Event = ProposalEvent::new( + EventType::Proposal.to_string(), + TallyResult::Passed, + id, + true, + false, + ) + .into(); + response.events.push(proposal_event); + proposals_result.rejected.push(id); + + slash_fund_address + } + } + } + None => { + let proposal_event: Event = ProposalEvent::new( + EventType::Proposal.to_string(), + TallyResult::Passed, + id, + false, + false, + ) + .into(); + response.events.push(proposal_event); + proposals_result.passed.push(id); + + proposal_author + } + } + } + TallyResult::Rejected | TallyResult::Unknown => { + let proposal_event: Event = ProposalEvent::new( + EventType::Proposal.to_string(), + TallyResult::Rejected, + id, + false, + false, + ) + .into(); + response.events.push(proposal_event); + proposals_result.rejected.push(id); + + slash_fund_address + } + }; + + // transfer proposal locked funds + shell + .storage + .transfer(&m1t(), funds, &gov_address, &transfer_address); + } + + Ok(proposals_result) +} diff --git a/apps/src/lib/node/ledger/shell/init_chain.rs b/apps/src/lib/node/ledger/shell/init_chain.rs index 77a23003da..890e8a4513 100644 --- a/apps/src/lib/node/ledger/shell/init_chain.rs +++ b/apps/src/lib/node/ledger/shell/init_chain.rs @@ -2,15 +2,20 @@ use std::collections::HashMap; use std::hash::Hash; +use namada::ledger::storage::traits::StorageHasher; +use namada::ledger::storage::{DBIter, DB}; +use namada::ledger::{ibc, pos}; use namada::types::key::*; +use namada::types::time::{DateTimeUtc, TimeZone, Utc}; +use namada::types::token; #[cfg(not(feature = "dev"))] use sha2::{Digest, Sha256}; -use super::queries::QueriesExt; use super::*; use crate::facade::tendermint_proto::abci; use crate::facade::tendermint_proto::crypto::PublicKey as TendermintPublicKey; use crate::facade::tendermint_proto::google::protobuf; +use 
crate::facade::tower_abci::{request, response}; use crate::wasm_loader; impl Shell @@ -21,7 +26,6 @@ where /// Create a new genesis for the chain with specified id. This includes /// 1. A set of initial users and tokens /// 2. Setting up the validity predicates for both users and tokens - /// 3. A matchmaker pub fn init_chain( &mut self, init: request::InitChain, @@ -61,7 +65,6 @@ where genesis.parameters.init_storage(&mut self.storage); genesis.gov_params.init_storage(&mut self.storage); - genesis.treasury_params.init_storage(&mut self.storage); // Depends on parameters being initialized self.storage @@ -84,10 +87,16 @@ where storage, } in genesis.established_accounts { - let vp_code = vp_code_cache - .get_or_insert_with(vp_code_path.clone(), || { - wasm_loader::read_wasm(&self.wasm_dir, &vp_code_path) - }); + let vp_code = match vp_code_cache.get(&vp_code_path).cloned() { + Some(vp_code) => vp_code, + None => { + let wasm = + wasm_loader::read_wasm(&self.wasm_dir, &vp_code_path) + .map_err(Error::ReadingWasm)?; + vp_code_cache.insert(vp_code_path.clone(), wasm.clone()); + wasm + } + }; // In dev, we don't check the hash #[cfg(feature = "dev")] @@ -139,9 +148,10 @@ where balances, } in genesis.token_accounts { - let vp_code = vp_code_cache - .get_or_insert_with(vp_code_path.clone(), || { + let vp_code = + vp_code_cache.get_or_insert_with(vp_code_path.clone(), || { wasm_loader::read_wasm(&self.wasm_dir, &vp_code_path) + .unwrap() }); // In dev, we don't check the hash @@ -183,6 +193,7 @@ where &self.wasm_dir, &validator.validator_vp_code_path, ) + .unwrap() }, ); @@ -258,15 +269,6 @@ where ); ibc::init_genesis_storage(&mut self.storage); - let evidence_params = self.storage.get_evidence_params( - &genesis.parameters.epoch_duration, - &genesis.pos_params, - ); - response.consensus_params = Some(ConsensusParams { - evidence: Some(evidence_params), - ..response.consensus_params.unwrap_or_default() - }); - // Set the initial validator set for validator in genesis.validators { let mut abci_validator = abci::ValidatorUpdate::default(); diff --git a/apps/src/lib/node/ledger/shell/mod.rs b/apps/src/lib/node/ledger/shell/mod.rs index f926d940b8..ae8aade9b4 100644 --- a/apps/src/lib/node/ledger/shell/mod.rs +++ b/apps/src/lib/node/ledger/shell/mod.rs @@ -6,6 +6,7 @@ //! (unless we can simply overwrite them in the next block). //! More info in . 
mod finalize_block; +mod governance; mod init_chain; mod prepare_proposal; mod process_proposal; @@ -22,6 +23,7 @@ use std::str::FromStr; use borsh::{BorshDeserialize, BorshSerialize}; use namada::ledger::gas::BlockGasMeter; +use namada::ledger::pos; use namada::ledger::pos::namada_proof_of_stake::types::{ ActiveValidator, ValidatorSetUpdate, }; @@ -29,18 +31,16 @@ use namada::ledger::pos::namada_proof_of_stake::PosBase; use namada::ledger::storage::traits::{Sha256Hasher, StorageHasher}; use namada::ledger::storage::write_log::WriteLog; use namada::ledger::storage::{DBIter, Storage, DB}; -use namada::ledger::{ibc, parameters, pos}; use namada::proto::{self, Tx}; +use namada::types::address; use namada::types::chain::ChainId; use namada::types::ethereum_events::EthereumEvent; use namada::types::key::*; use namada::types::storage::{BlockHeight, Key}; -use namada::types::time::{DateTimeUtc, TimeZone, Utc}; use namada::types::transaction::{ hash_tx, process_tx, verify_decrypted_correctly, AffineCurve, DecryptedTx, EllipticCurve, PairingEngine, TxType, WrapperTx, }; -use namada::types::{address, token}; use namada::vm::wasm::{TxCache, VpCache}; use namada::vm::WasmCacheRwAccess; use num_derive::{FromPrimitive, ToPrimitive}; @@ -55,10 +55,6 @@ use crate::facade::tendermint_proto::abci::{ Misbehavior as Evidence, MisbehaviorType as EvidenceType, ValidatorUpdate, }; use crate::facade::tendermint_proto::crypto::public_key; -#[cfg(not(feature = "abcipp"))] -use crate::facade::tendermint_proto::types::ConsensusParams; -#[cfg(feature = "abcipp")] -use crate::facade::tendermint_proto::types::ConsensusParams; use crate::facade::tower_abci::{request, response}; use crate::node::ledger::events::log::EventLog; use crate::node::ledger::events::Event; @@ -104,6 +100,8 @@ pub enum Error { Broadcaster(tokio::sync::mpsc::error::TryRecvError), #[error("Error executing proposal {0}: {1}")] BadProposal(u64, String), + #[error("Error reading wasm: {0}")] + ReadingWasm(#[from] eyre::Error), } impl From for TxResult { @@ -393,11 +391,10 @@ where ); let wallet = wallet::Wallet::load_or_new_from_genesis( wallet_path, - move || { - genesis::genesis_config::open_genesis_config( - genesis_path, - ) - }, + genesis::genesis_config::open_genesis_config( + genesis_path, + ) + .unwrap(), ); wallet .take_validator_data() @@ -542,6 +539,7 @@ where let pos_params = self.storage.read_pos_params(); let current_epoch = self.storage.block.epoch; for evidence in byzantine_validators { + tracing::info!("Processing evidence {evidence:?}."); let evidence_height = match u64::try_from(evidence.height) { Ok(height) => height, Err(err) => { @@ -567,6 +565,13 @@ where continue; } }; + if evidence_epoch + pos_params.unbonding_len <= current_epoch { + tracing::info!( + "Skipping outdated evidence from epoch \ + {evidence_epoch}" + ); + continue; + } let slash_type = match EvidenceType::from_i32(evidence.r#type) { Some(r#type) => match r#type { EvidenceType::DuplicateVote => { @@ -592,19 +597,7 @@ where } }; let validator_raw_hash = match evidence.validator { - Some(validator) => { - match String::from_utf8(validator.address) { - Ok(raw_hash) => raw_hash, - Err(err) => { - tracing::error!( - "Evidence failed to decode validator \ - address from utf-8 with {}", - err - ); - continue; - } - } - } + Some(validator) => tm_raw_hash_to_string(validator.address), None => { tracing::error!( "Evidence without a validator {:#?}", @@ -628,9 +621,9 @@ where }; tracing::info!( "Slashing {} for {} in epoch {}, block height {}", - evidence_epoch, - slash_type, 
validator, + slash_type, + evidence_epoch, evidence_height ); if let Err(err) = self.storage.slash( @@ -758,10 +751,10 @@ where let genesis_path = &self .base_dir .join(format!("{}.toml", self.chain_id.as_str())); - let mut wallet = - wallet::Wallet::load_or_new_from_genesis(wallet_path, move || { - genesis::genesis_config::open_genesis_config(genesis_path) - }); + let mut wallet = wallet::Wallet::load_or_new_from_genesis( + wallet_path, + genesis::genesis_config::open_genesis_config(genesis_path).unwrap(), + ); self.mode.get_validator_address().map(|addr| { let pk_bytes = self .storage @@ -803,11 +796,13 @@ mod test_utils { use namada::types::hash::Hash; use namada::types::key::*; use namada::types::storage::{BlockHash, Epoch, Header}; + use namada::types::time::DateTimeUtc; use namada::types::transaction::Fee; use tempfile::tempdir; use tokio::sync::mpsc::{Sender, UnboundedReceiver}; use super::*; + #[cfg(feature = "abciplus")] use crate::facade::tendermint_proto::abci::{ RequestInitChain, RequestProcessProposal, }; diff --git a/apps/src/lib/node/ledger/shell/process_proposal.rs b/apps/src/lib/node/ledger/shell/process_proposal.rs index e4203860c6..bf341a51a0 100644 --- a/apps/src/lib/node/ledger/shell/process_proposal.rs +++ b/apps/src/lib/node/ledger/shell/process_proposal.rs @@ -1,6 +1,7 @@ //! Implementation of the ['VerifyHeader`], [`ProcessProposal`], //! and [`RevertProposal`] ABCI++ methods for the Shell +use data_encoding::HEXUPPER; use namada::ledger::pos::types::VotingPower; use namada::types::transaction::protocol::ProtocolTxType; #[cfg(feature = "abcipp")] @@ -44,9 +45,9 @@ where req: RequestProcessProposal, ) -> ProcessProposal { tracing::info!( - proposer = ?hex::encode(&req.proposer_address), + proposer = ?HEXUPPER.encode(&req.proposer_address), height = req.height, - hash = ?hex::encode(&req.hash), + hash = ?HEXUPPER.encode(&req.hash), n_txs = req.txs.len(), "Received block proposal", ); @@ -58,9 +59,9 @@ where !self.has_proper_eth_events_num(&counters); if invalid_num_of_eth_ev_digests { tracing::warn!( - proposer = ?hex::encode(&req.proposer_address), + proposer = ?HEXUPPER.encode(&req.proposer_address), height = req.height, - hash = ?hex::encode(&req.hash), + hash = ?HEXUPPER.encode(&req.hash), eth_ev_digest_num = counters.eth_ev_digest_num, "Found invalid number of Ethereum events vote extension digests, proposed block \ will be rejected" @@ -71,9 +72,9 @@ where !self.has_proper_valset_upd_num(&counters); if invalid_num_of_valset_upd_digests { tracing::warn!( - proposer = ?hex::encode(&req.proposer_address), + proposer = ?HEXUPPER.encode(&req.proposer_address), height = req.height, - hash = ?hex::encode(&req.hash), + hash = ?HEXUPPER.encode(&req.hash), valset_upd_digest_num = counters.valset_upd_digest_num, "Found invalid number of validator set update vote extension digests, proposed block \ will be rejected" @@ -92,9 +93,9 @@ where }); if invalid_txs { tracing::warn!( - proposer = ?hex::encode(&req.proposer_address), + proposer = ?HEXUPPER.encode(&req.proposer_address), height = req.height, - hash = ?hex::encode(&req.hash), + hash = ?HEXUPPER.encode(&req.hash), "Found invalid transactions, proposed block will be rejected" ); } diff --git a/apps/src/lib/node/ledger/shell/queries.rs b/apps/src/lib/node/ledger/shell/queries.rs index 9203d4f3e8..1fad92eb9d 100644 --- a/apps/src/lib/node/ledger/shell/queries.rs +++ b/apps/src/lib/node/ledger/shell/queries.rs @@ -1,4 +1,5 @@ //! 
Shell methods for querying state + use std::cmp::max; use borsh::{BorshDeserialize, BorshSerialize}; diff --git a/apps/src/lib/node/ledger/shims/abcipp_shim.rs b/apps/src/lib/node/ledger/shims/abcipp_shim.rs index 5851441a0e..6975207c07 100644 --- a/apps/src/lib/node/ledger/shims/abcipp_shim.rs +++ b/apps/src/lib/node/ledger/shims/abcipp_shim.rs @@ -15,7 +15,6 @@ use namada::types::transaction::hash_tx; use tokio::sync::mpsc::{Receiver, UnboundedSender}; use tower::Service; -use super::super::Shell; use super::abcipp_shim_types::shim::request::{FinalizeBlock, ProcessedTx}; #[cfg(not(feature = "abcipp"))] use super::abcipp_shim_types::shim::TxBytes; @@ -24,6 +23,7 @@ use crate::config; #[cfg(not(feature = "abcipp"))] use crate::facade::tendermint_proto::abci::RequestBeginBlock; use crate::facade::tower_abci::{BoxError, Request as Req, Response as Resp}; +use crate::node::ledger::shell::Shell; /// The shim wraps the shell, which implements ABCI++. /// The shim makes a crude translation between the ABCI interface currently used diff --git a/apps/src/lib/node/ledger/storage/rocksdb.rs b/apps/src/lib/node/ledger/storage/rocksdb.rs index 069b6ed0a7..5085663c41 100644 --- a/apps/src/lib/node/ledger/storage/rocksdb.rs +++ b/apps/src/lib/node/ledger/storage/rocksdb.rs @@ -323,10 +323,14 @@ impl DB for RocksDB { let mut epoch = None; let mut pred_epochs = None; let mut address_gen = None; - for (key, bytes) in self.0.iterator_opt( + for value in self.0.iterator_opt( IteratorMode::From(prefix.as_bytes(), Direction::Forward), read_opts, ) { + let (key, bytes) = match value { + Ok(data) => data, + Err(e) => return Err(Error::DBError(e.into_string())), + }; let path = &String::from_utf8((*key).to_vec()).map_err(|e| { Error::Temporary { error: format!( @@ -802,24 +806,36 @@ impl<'iter> DBIter<'iter> for RocksDB { &'iter self, prefix: &Key, ) -> PersistentPrefixIterator<'iter> { - let db_prefix = "subspace/".to_owned(); - let prefix = format!("{}{}", db_prefix, prefix); + iter_prefix(self, prefix, Direction::Forward) + } - let mut read_opts = ReadOptions::default(); - // don't use the prefix bloom filter - read_opts.set_total_order_seek(true); - let mut upper_prefix = prefix.clone().into_bytes(); - if let Some(last) = upper_prefix.pop() { - upper_prefix.push(last + 1); - } - read_opts.set_iterate_upper_bound(upper_prefix); + fn rev_iter_prefix(&'iter self, prefix: &Key) -> Self::PrefixIter { + iter_prefix(self, prefix, Direction::Reverse) + } +} - let iter = self.0.iterator_opt( - IteratorMode::From(prefix.as_bytes(), Direction::Forward), - read_opts, - ); - PersistentPrefixIterator(PrefixIterator::new(iter, db_prefix)) +fn iter_prefix<'iter>( + db: &'iter RocksDB, + prefix: &Key, + direction: Direction, +) -> PersistentPrefixIterator<'iter> { + let db_prefix = "subspace/".to_owned(); + let prefix = format!("{}{}", db_prefix, prefix); + + let mut read_opts = ReadOptions::default(); + // don't use the prefix bloom filter + read_opts.set_total_order_seek(true); + let mut upper_prefix = prefix.clone().into_bytes(); + if let Some(last) = upper_prefix.pop() { + upper_prefix.push(last + 1); } + read_opts.set_iterate_upper_bound(upper_prefix); + + let iter = db.0.iterator_opt( + IteratorMode::From(prefix.as_bytes(), direction), + read_opts, + ); + PersistentPrefixIterator(PrefixIterator::new(iter, db_prefix)) } #[derive(Debug)] @@ -833,7 +849,9 @@ impl<'a> Iterator for PersistentPrefixIterator<'a> { /// Returns the next pair and the gas cost fn next(&mut self) -> Option<(String, Vec, u64)> { match 
self.0.iter.next() { - Some((key, val)) => { + Some(result) => { + let (key, val) = + result.expect("Prefix iterator shouldn't fail"); let key = String::from_utf8(key.to_vec()) .expect("Cannot convert from bytes to key string"); match key.strip_prefix(&self.0.db_prefix) { diff --git a/apps/src/lib/node/ledger/tendermint_node.rs b/apps/src/lib/node/ledger/tendermint_node.rs index b065bb7c59..0b4e6b2e9e 100644 --- a/apps/src/lib/node/ledger/tendermint_node.rs +++ b/apps/src/lib/node/ledger/tendermint_node.rs @@ -5,9 +5,11 @@ use std::str::FromStr; use borsh::BorshSerialize; use eyre::{eyre, Context}; -use namada::types::address::Address; use namada::types::chain::ChainId; -use namada::types::key::*; +use namada::types::key::{ + common, ed25519, secp256k1, tm_consensus_key_raw_hash, ParseSecretKeyError, + RefTo, SecretKey, +}; use namada::types::time::DateTimeUtc; use semver::{Version, VersionReq}; use serde_json::json; @@ -28,7 +30,7 @@ pub const ENV_VAR_TM_STDOUT: &str = "ANOMA_TM_STDOUT"; #[cfg(feature = "abciplus")] pub const VERSION_REQUIREMENTS: &str = ">= 0.37.0-alpha.2, <0.38.0"; -#[cfg(feature = "abcipp")] +#[cfg(not(feature = "abciplus"))] // TODO: update from our v0.36-based fork to v0.38 for full ABCI++ pub const VERSION_REQUIREMENTS: &str = "= 0.1.1-abcipp"; @@ -168,23 +170,10 @@ pub async fn run( #[cfg(feature = "dev")] { - let genesis = &crate::config::genesis::genesis(); let consensus_key = crate::wallet::defaults::validator_keypair(); // write the validator key file if it didn't already exist if !has_validator_key { - write_validator_key_async( - &home_dir, - &genesis - .validators - .first() - .expect( - "There should be one genesis validator in \"dev\" mode", - ) - .pos_data - .address, - &consensus_key, - ) - .await; + write_validator_key_async(&home_dir, &consensus_key).await; } } #[cfg(feature = "abcipp")] @@ -273,11 +262,9 @@ pub fn reset(tendermint_dir: impl AsRef) -> Result<()> { /// Convert a common signing scheme validator key into JSON for /// Tendermint fn validator_key_to_json( - address: &Address, sk: &common::SecretKey, ) -> std::result::Result { - let address = address.raw_hash().unwrap(); - + let raw_hash = tm_consensus_key_raw_hash(&sk.ref_to()); let (id_str, pk_arr, kp_arr) = match sk { common::SecretKey::Ed25519(_) => { let sk_ed: ed25519::SecretKey = sk.try_to_sk().unwrap(); @@ -299,7 +286,7 @@ fn validator_key_to_json( }; Ok(json!({ - "address": address, + "address": raw_hash, "pub_key": { "type": format!("tendermint/PubKey{}",id_str), "value": base64::encode(pk_arr), @@ -314,7 +301,6 @@ fn validator_key_to_json( /// Initialize validator private key for Tendermint pub async fn write_validator_key_async( home_dir: impl AsRef, - address: &Address, consensus_key: &common::SecretKey, ) { let home_dir = home_dir.as_ref(); @@ -331,7 +317,7 @@ pub async fn write_validator_key_async( .open(&path) .await .expect("Couldn't create private validator key file"); - let key = validator_key_to_json(address, consensus_key).unwrap(); + let key = validator_key_to_json(consensus_key).unwrap(); let data = serde_json::to_vec_pretty(&key) .expect("Couldn't encode private validator key file"); file.write_all(&data[..]) @@ -342,7 +328,6 @@ pub async fn write_validator_key_async( /// Initialize validator private key for Tendermint pub fn write_validator_key( home_dir: impl AsRef, - address: &Address, consensus_key: &common::SecretKey, ) { let home_dir = home_dir.as_ref(); @@ -357,7 +342,7 @@ pub fn write_validator_key( .truncate(true) .open(&path) .expect("Couldn't create private 
validator key file"); - let key = validator_key_to_json(address, consensus_key).unwrap(); + let key = validator_key_to_json(consensus_key).unwrap(); serde_json::to_writer_pretty(file, &key) .expect("Couldn't write private validator key file"); } diff --git a/apps/src/lib/node/matchmaker.rs b/apps/src/lib/node/matchmaker.rs deleted file mode 100644 index edd7a1ac32..0000000000 --- a/apps/src/lib/node/matchmaker.rs +++ /dev/null @@ -1,364 +0,0 @@ -use std::env; -use std::net::SocketAddr; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::sync::Arc; - -use borsh::{BorshDeserialize, BorshSerialize}; -use libc::c_void; -use libloading::Library; -use namada::proto::Tx; -use namada::types::address::{self, Address}; -use namada::types::dylib; -use namada::types::intent::{IntentTransfers, MatchedExchanges}; -use namada::types::key::*; -use namada::types::matchmaker::AddIntentResult; -use namada::types::transaction::{hash_tx, Fee, WrapperTx}; - -use super::gossip::rpc::matchmakers::{ - ClientDialer, ClientListener, MsgFromClient, MsgFromServer, -}; -use crate::cli::args; -use crate::client::rpc; -use crate::client::tendermint_rpc_types::TxBroadcastData; -use crate::client::tx::broadcast_tx; -use crate::facade::tendermint_config::net; -use crate::facade::tendermint_config::net::Address as TendermintAddress; -use crate::{cli, config, wasm_loader}; - -/// Run a matchmaker -#[tokio::main] -pub async fn run( - config::Matchmaker { - matchmaker_path, - tx_code_path, - }: config::Matchmaker, - intent_gossiper_addr: SocketAddr, - ledger_addr: TendermintAddress, - tx_signing_key: Rc, - tx_source_address: Address, - wasm_dir: impl AsRef, -) { - let matchmaker_path = matchmaker_path.unwrap_or_else(|| { - eprintln!("Please configure or specify the matchmaker path"); - cli::safe_exit(1); - }); - let tx_code_path = tx_code_path.unwrap_or_else(|| { - eprintln!("Please configure or specify the transaction code path"); - cli::safe_exit(1); - }); - - let (runner, result_handler) = Runner::new_pair( - intent_gossiper_addr, - matchmaker_path, - tx_code_path, - ledger_addr, - tx_signing_key, - tx_source_address, - wasm_dir, - ); - - // Instantiate and run the matchmaker implementation in a dedicated thread - let runner_join_handle = std::thread::spawn(move || { - runner.listen(); - }); - - // Process results async - result_handler.run().await; - - if let Err(error) = runner_join_handle.join() { - eprintln!("Matchmaker runner failed with: {:?}", error); - cli::safe_exit(1) - } -} - -/// A matchmaker receive intents and tries to find a match with previously -/// received intent. -#[derive(Debug)] -pub struct Runner { - matchmaker_path: PathBuf, - /// The client listener. This is consumed once the listener is started with - /// [`Runner::listen`]. - listener: Option, - /// Sender of results of matched intents to the [`ResultHandler`]. - result_send: tokio::sync::mpsc::UnboundedSender, -} - -/// Result handler processes the results sent from the matchmaker [`Runner`]. -#[derive(Debug)] -pub struct ResultHandler { - /// A dialer can send messages to the connected intent gossip node - dialer: ClientDialer, - /// A receiver of matched intents results from the [`Runner`]. - result_recv: tokio::sync::mpsc::UnboundedReceiver, - /// The ledger address to send any crafted transaction to - ledger_address: net::Address, - /// The code of the transaction that is going to be send to a ledger. - tx_code: Vec, - /// A source address for transactions created from intents. 
- tx_source_address: Address, - /// A keypair that will be used to sign transactions. - tx_signing_key: Rc, -} - -/// The loaded implementation's dylib and its state -#[derive(Debug)] -struct MatchmakerImpl { - /// The matchmaker's state as a raw mutable pointer to allow custom user - /// implementation in a dylib. - /// NOTE: The `state` field MUST be above the `library` field to ensure - /// that its destructor is ran before the implementation code is dropped. - state: MatchmakerState, - /// Matchmaker's implementation loaded from dylib - library: Library, -} - -/// The matchmaker's state as a raw mutable pointer to allow custom user -/// implementation in a dylib -#[derive(Debug)] -struct MatchmakerState(Arc<*mut c_void>); - -impl Runner { - /// Create a new matchmaker and a dialer that can be used to send messages - /// to the intent gossiper node. - pub fn new_pair( - intent_gossiper_addr: SocketAddr, - matchmaker_path: PathBuf, - tx_code_path: PathBuf, - ledger_address: TendermintAddress, - tx_signing_key: Rc, - tx_source_address: Address, - wasm_dir: impl AsRef, - ) -> (Self, ResultHandler) { - // Setup a channel for sending matchmaker results from `Self` to the - // `ResultHandler` - let (result_send, result_recv) = tokio::sync::mpsc::unbounded_channel(); - - // Prepare a client for intent gossiper node connection - let (listener, dialer) = ClientListener::new_pair(intent_gossiper_addr); - - let tx_code = wasm_loader::read_wasm(&wasm_dir, tx_code_path); - - ( - Self { - matchmaker_path, - listener: Some(listener), - result_send, - }, - ResultHandler { - dialer, - result_recv, - ledger_address, - tx_code, - tx_source_address, - tx_signing_key, - }, - ) - } - - pub fn listen(mut self) { - // Load the implementation's dylib and instantiate it. We have to do - // that here instead of `Self::new_pair`, because we cannot send - // it across threads and the listener is launched in a dedicated thread. - - // Check or add a filename extension to matchmaker path - let matchmaker_filename = - if let Some(ext) = self.matchmaker_path.extension() { - if ext != dylib::FILE_EXT { - tracing::warn!( - "Unexpected matchmaker file extension. Expected {}, \ - got {}.", - dylib::FILE_EXT, - ext.to_string_lossy(), - ); - } - self.matchmaker_path.clone() - } else { - let mut filename = self.matchmaker_path.clone(); - filename.set_extension(dylib::FILE_EXT); - filename - }; - - let matchmaker_dylib = if matchmaker_filename.is_absolute() { - // If the path is absolute, use it as is - matchmaker_filename - } else { - // The dylib should be built in the same directory as where Anoma - // binaries are, even when ran via `cargo run`. Anoma's pre-built - // binaries are distributed with the dylib(s) in the same directory. - let dylib_dir_with_bins = || { - let anoma_path = env::current_exe().unwrap(); - anoma_path - .parent() - .map(|path| path.to_owned()) - .unwrap() - .join(&matchmaker_filename) - }; - // Anoma built from source (`make install`) will install the - // dylib(s) to `~/.cargo/lib`. - let dylib_dir_installed = || { - directories::BaseDirs::new() - .expect("Couldn't determine the $HOME directory") - .home_dir() - .join(".cargo") - .join("lib") - .join(&matchmaker_filename) - }; - // Argument with file path relative to the current dir. 
- let dylib_dir_in_cwd = || { - let anoma_path = env::current_dir().unwrap(); - anoma_path.join(&matchmaker_filename) - }; - - // Try to find the matchmaker lib in either directory (computed - // lazily) - let matchmaker_dylib: Option = - check_file_exists(dylib_dir_with_bins) - .or_else(|| check_file_exists(dylib_dir_installed)) - .or_else(|| check_file_exists(dylib_dir_in_cwd)); - matchmaker_dylib.unwrap_or_else(|| { - panic!( - "The matchmaker library couldn't not be found. Did you \ - build it? Attempted to find it in directories \"{}\", \ - \"{}\" and \"{}\".", - dylib_dir_with_bins().to_string_lossy(), - dylib_dir_installed().to_string_lossy(), - dylib_dir_in_cwd().to_string_lossy(), - ); - }) - }; - tracing::info!( - "Running matchmaker from {}", - matchmaker_dylib.to_string_lossy() - ); - - let matchmaker_code = - unsafe { Library::new(matchmaker_dylib).unwrap() }; - - // Instantiate the matchmaker - let new_matchmaker: libloading::Symbol< - unsafe extern "C" fn() -> *mut c_void, - > = unsafe { matchmaker_code.get(b"_new_matchmaker").unwrap() }; - - let state = MatchmakerState(Arc::new(unsafe { new_matchmaker() })); - - let r#impl = MatchmakerImpl { - state, - library: matchmaker_code, - }; - - // Run the listener for messages from the connected intent gossiper node - self.listener.take().unwrap().listen(|msg| match msg { - MsgFromServer::AddIntent { id, data } => { - self.try_match_intent(&r#impl, id, data); - } - }) - } - - /// add the intent to the matchmaker mempool and tries to find a match for - /// that intent - fn try_match_intent( - &self, - r#impl: &MatchmakerImpl, - intent_id: Vec, - intent_data: Vec, - ) { - let add_intent: libloading::Symbol< - unsafe extern "C" fn( - *mut c_void, - &Vec, - &Vec, - ) -> AddIntentResult, - > = unsafe { r#impl.library.get(b"_add_intent").unwrap() }; - - let result = - unsafe { add_intent(*r#impl.state.0, &intent_id, &intent_data) }; - - self.result_send.send(result).unwrap(); - } -} - -impl Drop for MatchmakerImpl { - fn drop(&mut self) { - let drop_matchmaker: libloading::Symbol< - unsafe extern "C" fn(*mut c_void), - > = unsafe { self.library.get(b"_drop_matchmaker").unwrap() }; - - unsafe { drop_matchmaker(*self.state.0) }; - } -} - -impl ResultHandler { - async fn run(mut self) { - while let Some(result) = self.result_recv.recv().await { - if let Some(tx) = result.tx { - self.submit_tx(tx).await - } - if let Some(intent_ids) = result.matched_intents { - self.dialer.send(MsgFromClient::Matched { intent_ids }) - } - } - } - - async fn submit_tx(&self, tx_data: Vec) { - let tx_code = self.tx_code.clone(); - let matches = MatchedExchanges::try_from_slice(&tx_data[..]).unwrap(); - let intent_transfers = IntentTransfers { - matches, - source: self.tx_source_address.clone(), - }; - let tx_data = intent_transfers.try_to_vec().unwrap(); - let to_broadcast = { - let epoch = rpc::query_epoch(args::Query { - ledger_address: self.ledger_address.clone(), - }) - .await; - let tx = WrapperTx::new( - Fee { - amount: 0.into(), - token: address::xan(), - }, - &self.tx_signing_key, - epoch, - 0.into(), - Tx::new(tx_code, Some(tx_data)).sign(&self.tx_signing_key), - // TODO: Actually use the fetched encryption key - Default::default(), - ); - let wrapper_hash = hash_tx(&tx.try_to_vec().unwrap()).to_string(); - - let decrypted_hash = tx.tx_hash.to_string(); - TxBroadcastData::Wrapper { - tx: tx - .sign(&self.tx_signing_key) - .expect("Wrapper tx signing keypair should be correct"), - wrapper_hash, - decrypted_hash, - } - }; - - let response = - 
broadcast_tx(self.ledger_address.clone(), &to_broadcast).await; - match response { - Ok(tx_response) => { - tracing::info!( - "Injected transaction from matchmaker with result: {:#?}", - tx_response - ); - } - Err(err) => { - tracing::error!( - "Matchmaker error in submitting a transaction to the \ - ledger: {}", - err - ); - } - } - } -} - -/// Return the path of the file returned by `lazy_path` argument, if it exists. -fn check_file_exists(lazy_path: impl Fn() -> PathBuf) -> Option { - let path = lazy_path(); - if path.exists() { Some(path) } else { None } -} diff --git a/apps/src/lib/node/mod.rs b/apps/src/lib/node/mod.rs index 2265547868..370e1150a2 100644 --- a/apps/src/lib/node/mod.rs +++ b/apps/src/lib/node/mod.rs @@ -1,3 +1 @@ -pub mod gossip; pub mod ledger; -pub mod matchmaker; diff --git a/apps/src/lib/proto/README.md b/apps/src/lib/proto/README.md deleted file mode 100644 index 33a3b4673d..0000000000 --- a/apps/src/lib/proto/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Protobuf Compiled Definitions - -**The source files in the `generated` directory are generated by build.rs, do not edit them by hand** diff --git a/apps/src/lib/proto/generated.rs b/apps/src/lib/proto/generated.rs deleted file mode 100644 index 4e379ae78b..0000000000 --- a/apps/src/lib/proto/generated.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod services; diff --git a/apps/src/lib/proto/generated/.gitignore b/apps/src/lib/proto/generated/.gitignore deleted file mode 100644 index 6f5f3d11d3..0000000000 --- a/apps/src/lib/proto/generated/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.rs diff --git a/apps/src/lib/proto/mod.rs b/apps/src/lib/proto/mod.rs deleted file mode 100644 index 363f4e5455..0000000000 --- a/apps/src/lib/proto/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod generated; -mod types; - -pub use generated::services; -pub use types::{IntentMessage, RpcMessage, SubscribeTopicMessage}; diff --git a/apps/src/lib/proto/types.rs b/apps/src/lib/proto/types.rs deleted file mode 100644 index c8ef8af7e3..0000000000 --- a/apps/src/lib/proto/types.rs +++ /dev/null @@ -1,148 +0,0 @@ -use std::convert::{TryFrom, TryInto}; - -use namada::proto::{Dkg, Error, Intent}; - -use super::generated::services; - -pub type Result = std::result::Result; - -pub enum RpcMessage { - IntentMessage(IntentMessage), - SubscribeTopicMessage(SubscribeTopicMessage), - Dkg(Dkg), -} - -impl From for services::RpcMessage { - fn from(message: RpcMessage) -> Self { - let message = match message { - RpcMessage::IntentMessage(m) => { - services::rpc_message::Message::Intent(m.into()) - } - RpcMessage::SubscribeTopicMessage(m) => { - services::rpc_message::Message::Topic(m.into()) - } - RpcMessage::Dkg(d) => services::rpc_message::Message::Dkg(d.into()), - }; - services::RpcMessage { - message: Some(message), - } - } -} - -impl RpcMessage { - pub fn new_intent(intent: Intent, topic: String) -> Self { - RpcMessage::IntentMessage(IntentMessage::new(intent, topic)) - } - - pub fn new_topic(topic: String) -> Self { - RpcMessage::SubscribeTopicMessage(SubscribeTopicMessage::new(topic)) - } - - pub fn new_dkg(dkg: Dkg) -> Self { - RpcMessage::Dkg(dkg) - } -} - -#[derive(Debug, PartialEq)] -pub struct IntentMessage { - pub intent: Intent, - pub topic: String, -} - -impl TryFrom for IntentMessage { - type Error = Error; - - fn try_from(message: services::IntentMessage) -> Result { - match message.intent { - Some(intent) => Ok(IntentMessage { - intent: intent.try_into()?, - topic: message.topic, - }), - None => Err(Error::NoIntentError), - } - } -} - -impl From for 
services::IntentMessage { - fn from(message: IntentMessage) -> Self { - services::IntentMessage { - intent: Some(message.intent.into()), - topic: message.topic, - } - } -} - -impl IntentMessage { - pub fn new(intent: Intent, topic: String) -> Self { - IntentMessage { intent, topic } - } -} - -#[derive(Debug, PartialEq)] -pub struct SubscribeTopicMessage { - pub topic: String, -} - -impl From for SubscribeTopicMessage { - fn from(message: services::SubscribeTopicMessage) -> Self { - SubscribeTopicMessage { - topic: message.topic, - } - } -} - -impl From for services::SubscribeTopicMessage { - fn from(message: SubscribeTopicMessage) -> Self { - services::SubscribeTopicMessage { - topic: message.topic, - } - } -} - -impl SubscribeTopicMessage { - pub fn new(topic: String) -> Self { - SubscribeTopicMessage { topic } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_intent_message() { - let data = "arbitrary data".as_bytes().to_owned(); - let intent = Intent::new(data); - let topic = "arbitrary string".to_owned(); - let intent_message = IntentMessage::new(intent.clone(), topic.clone()); - - let intent_rpc_message = RpcMessage::new_intent(intent, topic); - let services_rpc_message: services::RpcMessage = - intent_rpc_message.into(); - match services_rpc_message.message { - Some(services::rpc_message::Message::Intent(i)) => { - let message_from_types = - IntentMessage::try_from(i).expect("no intent"); - assert_eq!(intent_message, message_from_types); - } - _ => panic!("no intent message"), - } - } - - #[test] - fn test_topic_message() { - let topic = "arbitrary string".to_owned(); - let topic_message = SubscribeTopicMessage::new(topic.clone()); - - let topic_rpc_message = RpcMessage::new_topic(topic); - let services_rpc_message: services::RpcMessage = - topic_rpc_message.into(); - match services_rpc_message.message { - Some(services::rpc_message::Message::Topic(t)) => { - let message_from_types = SubscribeTopicMessage::from(t); - assert_eq!(topic_message, message_from_types); - } - _ => panic!("no intent message"), - } - } -} diff --git a/apps/src/lib/wallet/defaults.rs b/apps/src/lib/wallet/defaults.rs index d4fb1b1b83..9fbda9b76b 100644 --- a/apps/src/lib/wallet/defaults.rs +++ b/apps/src/lib/wallet/defaults.rs @@ -4,8 +4,7 @@ pub use dev::{ addresses, albert_address, albert_keypair, bertha_address, bertha_keypair, christel_address, christel_keypair, daewon_address, daewon_keypair, keys, - matchmaker_address, matchmaker_keypair, validator_address, - validator_keypair, validator_keys, + validator_address, validator_keypair, validator_keys, }; use namada::ledger::{eth_bridge, governance, pos}; use namada::types::address::Address; @@ -119,7 +118,6 @@ mod dev { ("bertha".into(), bertha_keypair()), ("christel".into(), christel_keypair()), ("daewon".into(), daewon_keypair()), - ("matchmaker".into(), matchmaker_keypair()), ("validator".into(), validator_keypair()), ] } @@ -130,7 +128,6 @@ mod dev { ("pos".into(), pos::ADDRESS), ("pos_slash_pool".into(), pos::SLASH_POOL_ADDRESS), ("governance".into(), governance::vp::ADDRESS), - ("matchmaker".into(), matchmaker_address()), ("validator".into(), validator_address()), ("albert".into(), albert_address()), ("bertha".into(), bertha_address()), @@ -170,11 +167,6 @@ mod dev { Address::decode("atest1v4ehgw36ggcnsdee8qerswph8y6ry3p5xgunvve3xaqngd3kxc6nqwz9gseyydzzg5unys3ht2n48q").expect("The token address decoding shouldn't fail") } - /// An established matchmaker address for testing & development - pub fn matchmaker_address() -> Address 
{ - Address::decode("atest1v4ehgw36x5mnswphx565gv2yxdprzvf5gdp523jpxy6rvv6zxaznzsejxeznzseh8pp5ywz93xwala").expect("The address decoding shouldn't fail") - } - pub fn albert_keypair() -> common::SecretKey { // generated from // [`namada::types::key::ed25519::gen_keypair`] @@ -234,16 +226,4 @@ mod dev { let ed_sk = ed25519::SecretKey::try_from_slice(&bytes).unwrap(); ed_sk.try_to_sk().unwrap() } - - pub fn matchmaker_keypair() -> common::SecretKey { - // generated from - // [`namada::types::key::ed25519::gen_keypair`] - let bytes = [ - 91, 67, 244, 37, 241, 33, 157, 218, 37, 172, 191, 122, 75, 2, 44, - 219, 28, 123, 44, 34, 9, 240, 244, 49, 112, 192, 180, 98, 142, 160, - 182, 14, - ]; - let ed_sk = ed25519::SecretKey::try_from_slice(&bytes).unwrap(); - ed_sk.try_to_sk().unwrap() - } } diff --git a/apps/src/lib/wallet/keys.rs b/apps/src/lib/wallet/keys.rs index 1c521e7515..2592365016 100644 --- a/apps/src/lib/wallet/keys.rs +++ b/apps/src/lib/wallet/keys.rs @@ -5,6 +5,7 @@ use std::rc::Rc; use std::str::FromStr; use borsh::{BorshDeserialize, BorshSerialize}; +use data_encoding::HEXLOWER; use namada::types::key::*; use orion::{aead, kdf}; use serde::{Deserialize, Serialize}; @@ -108,15 +109,15 @@ pub struct EncryptedKeypair(Vec); impl Display for EncryptedKeypair { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(&self.0)) + write!(f, "{}", HEXLOWER.encode(self.0.as_ref())) } } impl FromStr for EncryptedKeypair { - type Err = hex::FromHexError; + type Err = data_encoding::DecodeError; fn from_str(s: &str) -> Result { - hex::decode(s).map(Self) + HEXLOWER.decode(s.as_ref()).map(Self) } } @@ -235,6 +236,6 @@ fn encryption_salt() -> kdf::Salt { /// Make encryption secret key from a password. fn encryption_key(salt: &kdf::Salt, password: String) -> kdf::SecretKey { kdf::Password::from_slice(password.as_bytes()) - .and_then(|password| kdf::derive_key(&password, salt, 3, 1 << 16, 32)) + .and_then(|password| kdf::derive_key(&password, salt, 3, 1 << 17, 32)) .expect("Generation of encryption secret key shouldn't fail") } diff --git a/apps/src/lib/wallet/mod.rs b/apps/src/lib/wallet/mod.rs index 5646416a38..18cc9ee83e 100644 --- a/apps/src/lib/wallet/mod.rs +++ b/apps/src/lib/wallet/mod.rs @@ -68,9 +68,9 @@ impl Wallet { /// addresses loaded from the genesis file, if not found. pub fn load_or_new_from_genesis( store_dir: &Path, - load_genesis: impl FnOnce() -> GenesisConfig, + genesis_cfg: GenesisConfig, ) -> Self { - let store = Store::load_or_new_from_genesis(store_dir, load_genesis) + let store = Store::load_or_new_from_genesis(store_dir, genesis_cfg) .unwrap_or_else(|err| { eprintln!("Unable to load the wallet: {}", err); cli::safe_exit(1) @@ -308,6 +308,11 @@ impl Wallet { self.store.find_address(alias) } + /// Find an alias by the address if it's in the wallet. + pub fn find_alias(&self, address: &Address) -> Option<&Alias> { + self.store.find_alias(address) + } + /// Get all known addresses by their alias, paired with PKH, if known. 
pub fn get_addresses(&self) -> HashMap { self.store diff --git a/apps/src/lib/wallet/store.rs b/apps/src/lib/wallet/store.rs index 88af4c9111..076e2b5a04 100644 --- a/apps/src/lib/wallet/store.rs +++ b/apps/src/lib/wallet/store.rs @@ -8,6 +8,7 @@ use std::str::FromStr; use ark_std::rand::prelude::*; use ark_std::rand::SeedableRng; +use bimap::BiHashMap; use file_lock::{FileLock, FileOptions}; use namada::types::address::{Address, ImplicitAddress}; use namada::types::key::dkg_session_keys::DkgKeypair; @@ -55,7 +56,7 @@ pub struct Store { /// Cryptographic keypairs keys: HashMap, /// Anoma address book - addresses: HashMap, + addresses: BiHashMap, /// Known mappings of public key hashes to their aliases in the `keys` /// field. Used for look-up by a public key. pkhs: HashMap, @@ -137,15 +138,15 @@ impl Store { /// the genesis file, if not found. pub fn load_or_new_from_genesis( store_dir: &Path, - load_genesis: impl FnOnce() -> GenesisConfig, + genesis_cfg: GenesisConfig, ) -> Result { Self::load(store_dir).or_else(|_| { #[cfg(not(feature = "dev"))] - let store = Self::new(load_genesis()); + let store = Self::new(genesis_cfg); #[cfg(feature = "dev")] let store = { // The function is unused in dev - let _ = load_genesis; + let _ = genesis_cfg; Self::new() }; store.save(store_dir).map_err(|err| { @@ -226,7 +227,12 @@ impl Store { /// Find the stored address by an alias. pub fn find_address(&self, alias: impl AsRef) -> Option<&Address> { - self.addresses.get(&alias.into()) + self.addresses.get_by_left(&alias.into()) + } + + /// Find an alias by the address if it's in the wallet. + pub fn find_alias(&self, address: &Address) -> Option<&Alias> { + self.addresses.get_by_right(address) } /// Get all known keys by their alias, paired with PKH, if known. @@ -250,7 +256,7 @@ impl Store { } /// Get all known addresses by their alias, paired with PKH, if known. - pub fn get_addresses(&self) -> &HashMap { + pub fn get_addresses(&self) -> &BiHashMap { &self.addresses } @@ -379,7 +385,7 @@ impl Store { alias = address.encode() ); } - if self.addresses.contains_key(&alias) { + if self.addresses.contains_left(&alias) { match show_overwrite_confirmation(&alias, "an address") { ConfirmationResponse::Replace => {} ConfirmationResponse::Reselect(new_alias) => { diff --git a/apps/src/lib/wasm_loader/mod.rs b/apps/src/lib/wasm_loader/mod.rs index b6cb424457..e82bb92452 100644 --- a/apps/src/lib/wasm_loader/mod.rs +++ b/apps/src/lib/wasm_loader/mod.rs @@ -4,8 +4,9 @@ use std::collections::HashMap; use std::fs; use std::path::Path; +use data_encoding::HEXLOWER; +use eyre::{eyre, WrapErr}; use futures::future::join_all; -use hex; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; use thiserror::Error; @@ -100,9 +101,8 @@ impl Checksums { } } -/// Download all the pre-build WASMs, or if they're already downloaded, verify -/// their checksums. Download all the pre-build WASMs, or if they're already -/// downloaded, verify their checksums. +/// Download all the pre-built wasms, or if they're already downloaded, verify +/// their checksums. 
pub async fn pre_fetch_wasm(wasm_directory: impl AsRef) { #[cfg(feature = "dev")] { @@ -144,7 +144,7 @@ pub async fn pre_fetch_wasm(wasm_directory: impl AsRef) { Ok(bytes) => { let mut hasher = Sha256::new(); hasher.update(bytes); - let result = hex::encode(hasher.finalize()); + let result = HEXLOWER.encode(&hasher.finalize()); let derived_name = format!( "{}.{}.wasm", &name.split('.').collect::>()[0], @@ -260,67 +260,49 @@ pub async fn pre_fetch_wasm(wasm_directory: impl AsRef) { pub fn read_wasm( wasm_directory: impl AsRef, file_path: impl AsRef, -) -> Vec { +) -> eyre::Result> { // load json with wasm hashes let checksums = Checksums::read_checksums(&wasm_directory); if let Some(os_name) = file_path.as_ref().file_name() { if let Some(name) = os_name.to_str() { - match checksums.0.get(name) { + let wasm_path = match checksums.0.get(name) { Some(wasm_filename) => { - let wasm_path = wasm_directory.as_ref().join(wasm_filename); - match fs::read(&wasm_path) { - Ok(bytes) => { - return bytes; - } - Err(_) => { - eprintln!( - "File {} not found. ", - wasm_path.to_string_lossy() - ); - safe_exit(1); - } - } + wasm_directory.as_ref().join(wasm_filename) } None => { if !file_path.as_ref().is_absolute() { - match fs::read( - wasm_directory.as_ref().join(file_path.as_ref()), - ) { - Ok(bytes) => { - return bytes; - } - Err(_) => { - eprintln!( - "Could not read file {}. ", - file_path.as_ref().to_string_lossy() - ); - safe_exit(1); - } - } + wasm_directory.as_ref().join(file_path.as_ref()) } else { - match fs::read(file_path.as_ref()) { - Ok(bytes) => { - return bytes; - } - Err(_) => { - eprintln!( - "Could not read file {}. ", - file_path.as_ref().to_string_lossy() - ); - safe_exit(1); - } - } + file_path.as_ref().to_path_buf() } } - } + }; + return fs::read(&wasm_path).wrap_err_with(|| { + format!( + "Failed to read WASM from {}", + &wasm_path.to_string_lossy() + ) + }); } } - eprintln!( - "File {} does not exist.", + Err(eyre!( + "Could not read {}", file_path.as_ref().to_string_lossy() - ); - safe_exit(1); + )) +} + +pub fn read_wasm_or_exit( + wasm_directory: impl AsRef, + file_path: impl AsRef, +) -> Vec { + match read_wasm(wasm_directory, file_path) { + Ok(wasm) => wasm, + Err(err) => { + eprintln!("Error reading wasm: {}", err); + safe_exit(1); + } + } } async fn download_wasm(url: String) -> Result, Error> { diff --git a/documentation/dev/src/explore/design/ledger/governance.md b/documentation/dev/src/explore/design/ledger/governance.md index 21fdc55da1..da26c8e989 100644 --- a/documentation/dev/src/explore/design/ledger/governance.md +++ b/documentation/dev/src/explore/design/ledger/governance.md @@ -2,21 +2,21 @@ Namada introduce a governance mechanism to propose and apply protocol changes with and without the need for an hard fork. Anyone holding some M1T will be able to prosose some changes to which delegators and validator will cast their yay or nay votes. Governance on Namada supports both signaling and voting mechanism. The difference between the the two, is that the former is needed when the changes require an hard fork. In cases where the chain is not able to produce blocks anymore, Namada relies an off chain signaling mechanism to agree on a common strategy. 
-## Governance & Treasury addresses +## Governance & SlashFund addresses Governance introduce two internal address with their corresponding native vps: - Governance address, which is in charge of validating on-chain proposals and votes -- Treasury address, which is in charge of holding treasury funds +- SlashFund address, which is in charge of holding slashed funds Also, it introduces some protocol parameters: - `min_proposal_fund` - `max_proposal_code_size` - `min_proposal_period` +- `max_proposal_period` - `max_proposal_content_size` - `min_proposal_grace_epochs` -- `max_proposal_fund_transfer` ## On-chain proposals @@ -27,9 +27,9 @@ On-chain proposals are created under the `governance_address` storage space and, /$GovernanceAddress/min_proposal_fund: u64 /$GovernanceAddress/max_proposal_code_size: u64 /$GovernanceAddress/min_proposal_period: u64 +/$GovernanceAddress/max_proposal_period: u64 /$GovernanceAddress/max_proposal_content_size: u64 /$GovernanceAddress/min_proposal_grace_epochs: u64 -/$GovernanceAddress/max_proposal_fund_transfer: u64 ``` In order to create a valid proposal, a transaction need to modify these storage keys: @@ -51,7 +51,8 @@ and follow these rules: - be grater than `currentEpoch`, where current epoch is the epoch in which the transaction is executed and included in a block - be a multiple of `min_proposal_period`. - `endEpoch` must: - - be at least `min_proposal_period` epoch greater than `startEpoch` + - be at least `min_proposal_period` epochs greater than `startEpoch` + - be at most `max_proposal_period` epochs greater than `startEpoch` - be a multiple of `min_proposal_period` - `graceEpoch` must: - be at least `min_grace_epoch` epochs greater than `endEpoch` @@ -60,7 +61,7 @@ and follow these rules: - `content` should follow the `Namada Improvement Proposal schema` and must be less than `max_proposal_content_size` kibibytes. - `author` must be a valid address on-chain -A proposal gets accepted if, at least 2/3 of the total voting power (computed at the epoch definied in the `startEpoch` field) vote `yay`. If the proposal is accepted, the locked funds are returned to the address definied in the `proposal_author` field, otherwise are moved to the treasury address. +A proposal gets accepted if, at least 2/3 of the total voting power (computed at the epoch definied in the `startEpoch` field) vote `yay`. If the proposal is accepted, the locked funds are returned to the address definied in the `proposal_author` field, otherwise are moved to the slash fund address. The `proposal_code` field can execute arbitrary code in the form of a wasm transaction. If the proposal gets accepted, the code is executed in the first block of the epoch following the `graceEpoch`. @@ -98,7 +99,7 @@ Vote is valid if it follow this rules: The outcome of a proposal is compute at the epoch specific in the `endEpoch` field and executed at `graceEpoch` field (if it contains a non-empty `proposalCode` field). A proposal is accepted only if more than 2/3 of the voting power vote `yay`. -If a proposal gets accepted, the locked funds will be reimbursed to the author. In case it gets rejected, the locked funds will be moved to treasury. +If a proposal gets accepted, the locked funds will be reimbursed to the author. In case it gets rejected, the locked funds will be moved to slash fund. 
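As an illustration of the tally rule described above, here is a minimal Rust sketch of the acceptance check. The function name and the use of plain integer voting-power values are assumptions for illustration only (the text states the threshold both as "at least 2/3" and "more than 2/3"; a non-strict comparison is used here), and the real implementation lives in the governance utilities linked from the specs:

```rust
/// Illustrative-only tally check: a proposal passes when the `yay` voting
/// power reaches 2/3 of the total voting power computed at `startEpoch`.
/// Integer arithmetic avoids rounding a floating-point 2/3.
fn is_proposal_accepted(yay_voting_power: u128, total_voting_power: u128) -> bool {
    // yay / total >= 2/3  <=>  3 * yay >= 2 * total
    3 * yay_voting_power >= 2 * total_voting_power
}

fn main() {
    assert!(is_proposal_accepted(700, 1_000));
    assert!(!is_proposal_accepted(600, 1_000));
}
```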
## Off-chain proposal diff --git a/documentation/docs/src/README.md b/documentation/docs/src/README.md index 29f18f2b50..8085540251 100644 --- a/documentation/docs/src/README.md +++ b/documentation/docs/src/README.md @@ -4,12 +4,34 @@ Welcome to Namada's docs! ## About Namada -[Namada](https://namada.net/) is a sovereign proof-of-stake blockchain, using Tendermint BFT consensus, that enables multi-asset private transfers for any native or non-native asset using a multi-asset shielded pool derived from the Sapling circuit. +[Namada](https://namada.net/) is a Proof-of-Stake layer 1 protocol for asset-agnostic, interchain privacy. Namada is Anoma's first fractal instance and is currently being developed by [Heliax](https://heliax.dev), a public goods lab. -To learn more about the protocol, we recommend the following resources: +Key innovations include: +- ZCash-like transfers for any assets (fungible and non-fungible) +- Rewarded usage of privacy as a public good +- Interoperability with Ethereum via a custom bridge with trust-minimisation +- Vertically integrated user interfaces -- [Introducing Namada: Shielded Transfers with Any Assets](https://medium.com/anomanetwork/introducing-namada-shielded-transfers-with-any-assets-dce2e579384c) +To learn more, we recommend: +- Article: [Introducing Namada: Shielded Transfers with Any Assets](https://medium.com/anomanetwork/introducing-namada-shielded-transfers-with-any-assets-dce2e579384c) + +## Overview of features +- Proof-of-Stake with governance to secure and evolve Namada +- Fast-finality BFT with 4-second blocks +- Near-zero fees +- Trustless 2-way Ethereum bridge via IBC implementation on ETH +- IBC bridges to chains that already speak IBC (all Cosmos chains) +- MASP +- Convert Circuit (shielded set rewards) +- A reference interface +- Ledger application + +To learn more about the protocol, we recommend the following in-depth resources: +- Talk at ZK8 [Namada: asset-agnostic interchain privacy](https://youtu.be/5K6YxmZPFkE) - [Namada's specifications](https://specs.namada.net) +- [Codebase](https://github.com/anoma/namada) + +## About this documentation This book is written using [mdBook](https://rust-lang.github.io/mdBook/), the source can be found in the [Namada repository](https://github.com/anoma/namada/tree/main/documentation/docs). 
diff --git a/documentation/docs/src/SUMMARY.md b/documentation/docs/src/SUMMARY.md index 2a3acf5cf9..956eba7921 100644 --- a/documentation/docs/src/SUMMARY.md +++ b/documentation/docs/src/SUMMARY.md @@ -1,15 +1,15 @@ # Summary - [Introduction](./README.md) -- [Quick Start](./quick-start.md) -- [User Guide](./user-guide/README.md) - - [Install Namada](./user-guide/install.md) +- [Quickstart](./quick-start.md) +- [User guide](./user-guide/README.md) + - [Installing Namada](./user-guide/install.md) - [Getting started](./user-guide/getting-started.md) - - [Manage a Wallet](./user-guide/wallet.md) + - [Managing a wallet](./user-guide/wallet.md) - [The Ledger](./user-guide/ledger.md) - - [Interact with PoS](./user-guide/ledger/pos.md) + - [Interacting with PoS](./user-guide/ledger/pos.md) - [Governance](./user-guide/ledger/governance.md) - - [Private Transfers](./user-guide/ledger/masp.md) + - [Shielded transfers](./user-guide/ledger/masp.md) - [Genesis validator setup](./user-guide/genesis-validator-setup.md) - [Applying to be a genesis validator](./user-guide/genesis-validator-apply.md) - [Testnets](./testnets/README.md) diff --git a/documentation/docs/src/chapter_1.md b/documentation/docs/src/chapter_1.md deleted file mode 100644 index b743fda354..0000000000 --- a/documentation/docs/src/chapter_1.md +++ /dev/null @@ -1 +0,0 @@ -# Chapter 1 diff --git a/documentation/docs/src/quick-start.md b/documentation/docs/src/quick-start.md index 41a45c7158..ac333c6c07 100644 --- a/documentation/docs/src/quick-start.md +++ b/documentation/docs/src/quick-start.md @@ -1,17 +1,20 @@ -# Quick Start - How to run a validator node +# Quickstart - How to run a validator on Namada? ## About this guide -This guide is aimed at people interested in running a validator node and assumes basic knowledge of the terminal and how commands are used. +This guide is for the ones interested in operating a Namada validator node and assumes basic knowledge of the terminal and how commands are used. -* Comments start with `#`: - `# this is a comment make sure you read them!` -* Sample outputs start with an arrow: - `➜ this is an example command line output useful for comparing` +* Comments start with `#`: -## Install Namada +```# this is a comment make sure you read them!``` -See [the install guide](user-guide/install.md) for details on installing the Namada binaries. Commands in this guide will assume you have the Namada binaries (`namada`, `namadan`, `namadaw`, `namadac`) on your path. +* Sample outputs start with an arrow: + +```➜ this is an example command line output useful for comparing``` + +## Installing Namada + +See [the installation guide](user-guide/install.md) for details on installing the Namada binaries. Commands in this guide will assume you have the Namada binaries (`namada`, `namadan`, `namadaw`, `namadac`) on your path. ## Joining a network diff --git a/documentation/docs/src/testnets/internal-testnet-1.md b/documentation/docs/src/testnets/internal-testnet-1.md index d4ad45adb6..ef376341d8 100644 --- a/documentation/docs/src/testnets/internal-testnet-1.md +++ b/documentation/docs/src/testnets/internal-testnet-1.md @@ -165,7 +165,7 @@ specified. However, any transparent account can sign these transactions. 
### Build from Source -Build the provided validity predicate, transaction and matchmaker wasm modules +Build the provided validity predicate and transaction wasm modules ```shell make build-wasm-scripts-docker diff --git a/documentation/docs/src/user-guide/README.md b/documentation/docs/src/user-guide/README.md index 422df55146..70bf35ef59 100644 --- a/documentation/docs/src/user-guide/README.md +++ b/documentation/docs/src/user-guide/README.md @@ -1,5 +1,5 @@ # User Guide -Welcome to Namada user guide! +Welcome to Namada's user guide! -This guide is intended to help you find how to install, operate and interact with the Namada ledger node, the client and the wallet. +This guide will help you find how to install, operate and interact with the Namada ledger node, the client, and the wallet. diff --git a/documentation/docs/src/user-guide/getting-started.md b/documentation/docs/src/user-guide/getting-started.md index 45154615f4..736f940b98 100644 --- a/documentation/docs/src/user-guide/getting-started.md +++ b/documentation/docs/src/user-guide/getting-started.md @@ -3,7 +3,7 @@ This guide assumes that the Namada binaries are [installed](./install.md) and available on path. These are: - `namada`: The main binary that can be used to interact with all the components of Namada -- `namadan`: The ledger and intent gossiper node +- `namadan`: The ledger node - `namadac`: The client - `namadaw`: The wallet diff --git a/documentation/docs/src/user-guide/intent-gossiper-and-matchmaker.md b/documentation/docs/src/user-guide/intent-gossiper-and-matchmaker.md deleted file mode 100644 index abacd929d6..0000000000 --- a/documentation/docs/src/user-guide/intent-gossiper-and-matchmaker.md +++ /dev/null @@ -1,124 +0,0 @@ -# The Intent gossiper and Matchmaker - -To run an intent gossiper node with an RPC server through which new intents can be submitted: - -```shell -anoma node gossip --rpc "127.0.0.1:26660" -``` - -To run a token exchange matchmaker: - -```shell -anoma node matchmaker --matchmaker-path libmm_token_exch --tx-code-path wasm/tx_from_intent.wasm --ledger-address "127.0.0.1:26657" --source matchmaker --signing-key matchmaker -``` - -Mind that `matchmaker` must be an established account known on the ledger with a key in your wallet that will be used to sign transactions submitted from the matchmaker to the ledger. - -This pre-built matchmaker implementation is [the fungible token exchange `mm_token_exch`](https://github.com/anoma/anoma/blob/5051b3abbc645aed2e40e1ff8db2d682e9a115e9/matchmaker/mm_token_exch/src/lib.rs), that is being used together with [the pre-built `tx_from_intent` transaction WASM](https://github.com/anoma/anoma/blob/5051b3abbc645aed2e40e1ff8db2d682e9a115e9/wasm/wasm_source/src/lib.rs#L140) to submit transaction from matched intents to the ledger. 
- -## ✋ Example intents - -1) Lets create some accounts: - - ```shell - anoma wallet key gen --alias alberto --unsafe-dont-encrypt - anoma client init-account --alias alberto-account --public-key alberto --source alberto - - anoma wallet key gen --alias christel --unsafe-dont-encrypt - anoma client init-account --alias christel-account --public-key christel --source christel - - anoma wallet key gen --alias bertha --unsafe-dont-encrypt - anoma client init-account --alias bertha-account --public-key bertha --source bertha - - anoma wallet key gen --alias my-matchmaker --unsafe-dont-encrypt - anoma client init-account --alias my-matchmaker-account --public-key my-matchmaker --source my-matchmaker - ``` - -1) We then need some tokens: - - ```shell - anoma client transfer --source faucet --target alberto-account --signer alberto-account --token BTC --amount 1000 - anoma client transfer --source faucet --target bertha-account --signer bertha-account --token ETH --amount 1000 - anoma client transfer --source faucet --target christel-account --signer christel-account --token NAM --amount 1000 - ``` - -1) Lets export some variables: - - ```shell - export ALBERTO=$(anoma wallet address find --alias alberto-account | cut -c 28- | tr -d '\n') - export CHRISTEL=$(anoma wallet address find --alias christel-account | cut -c 28- | tr -d '\n') - export BERTHA=$(anoma wallet address find --alias bertha-account | cut -c 28- | tr -d '\n') - export NAM=$(anoma wallet address find --alias NAM | cut -c 28- | tr -d '\n') - export BTC=$(anoma wallet address find --alias BTC | cut -c 28- | tr -d '\n') - export ETH=$(anoma wallet address find --alias ETH | cut -c 28- | tr -d '\n') - ``` - -1) Create files with the intents description: - - ```shell - echo '[{"addr":"'$ALBERTO'","key":"'$ALBERTO'","max_sell":"70","min_buy":"100","rate_min":"2","token_buy":"'$NAM'","token_sell":"'$BTC'","vp_path": "wasm_for_tests/vp_always_true.wasm"}]' > intent.A.data - - echo '[{"addr":"'$BERTHA'","key":"'$BERTHA'","max_sell":"300","min_buy":"50","rate_min":"0.7","token_buy":"'$BTC'","token_sell":"'$ETH'"}]' > intent.B.data - - echo '[{"addr":"'$CHRISTEL'","key":"'$CHRISTEL'","max_sell":"200","min_buy":"20","rate_min":"0.5","token_buy":"'$ETH'","token_sell":"'$NAM'"}]' > intent.C.data - ``` - -1) Start the ledger, intent gossiper and the matchmaker. Instruct the intent gossiper to subscribe to a topic "asset_v1": - - ```shell - anoma node ledger run - - anoma node gossip --rpc "127.0.0.1:26660" - - anoma node matchmaker --matchmaker-path wasm/mm_token_exch.wasm --tx-code-path wasm/tx_from_intent.wasm --ledger-address "127.0.0.1:26657" --source mm-1 --signing-key mm-1 - - anoma client subscribe-topic --node "http://127.0.0.1:26660" --topic "asset_v1" - ``` - -1) Submit the intents (the target gossiper node must be running an RPC server): - - ```shell - anoma client intent --data-path intent.A.data --topic "asset_v1" --signing-key alberto --node "http://127.0.0.1:26660" - anoma client intent --data-path intent.B.data --topic "asset_v1" --signing-key bertha --node "http://127.0.0.1:26660" - anoma client intent --data-path intent.C.data --topic "asset_v1" --signing-key christel --node "http://127.0.0.1:26660" - ``` - - The matchmaker should find a match from these intents and submit a transaction to the ledger that performs the n-party transfers of tokens. 
- -1) You can check the balances with: - - ```shell - anoma client balance --owner alberto-account - anoma client balance --owner bertha-account - anoma client balance --owner christel-account - ``` - -## 🤝 Custom matchmaker - -A custom matchmaker code can be built from [`matchmaker/mm_template`](https://github.com/anoma/anoma/tree/master/matchmaker/mm_template). - -The `anoma_macros::Matchmaker` macro can be used to derive the binding code for the matchmaker runner on any custom implementation, e.g.: - -```rust -#[derive(Default, Matchmaker)] -struct MyMatchmaker; -``` - -This macro requires that there is a `Default` implementation (derived or custom) for the matchmaker, which can be used by the runner to instantiate the matchmaker. - -The matchmaker must also implement `AddIntent`, e.g.: - -```rust -impl AddIntent for MyMatchmaker { - // This function will be called when a new intent is received - fn add_intent( - &mut self, - _intent_id: &Vec, - _intent_data: &Vec, - ) -> AddIntentResult { - AddIntentResult::default() - } -} -``` - -To submit a transaction from the matchmaker, add it to the `AddIntentResult` along with a hash set of the intent IDs that were matched into the transaction. diff --git a/documentation/docs/src/user-guide/ledger/masp.md b/documentation/docs/src/user-guide/ledger/masp.md index 0c50995e81..09dc0b7192 100644 --- a/documentation/docs/src/user-guide/ledger/masp.md +++ b/documentation/docs/src/user-guide/ledger/masp.md @@ -1,6 +1,6 @@ -# Private transfers +# Shielded transfers -In Namada, private transfers are enabled by the Multi-Asset Shielded Pool (MASP). The MASP is a zero-knowledge circuit (zk-SNARK) that extends the Zcash Sapling circuit to add support for sending arbitrary assets. All assets in the pool share the same anonymity set, this means that the more transactions are issued to MASP, the stronger are the privacity guarantees. +In Namada, shielded transfers are enabled by the Multi-Asset Shielded Pool (MASP). The MASP is a zero-knowledge circuit (zk-SNARK) that extends the Zcash Sapling circuit to add support for sending arbitrary assets. All assets in the pool share the same anonymity set, this means that the more transactions are issued to MASP, the stronger are the privacity guarantees. 
## Using MASP diff --git a/documentation/docs/theme/favicon.png b/documentation/docs/theme/favicon.png index f34029b971..11dfb07620 100644 Binary files a/documentation/docs/theme/favicon.png and b/documentation/docs/theme/favicon.png differ diff --git a/documentation/docs/theme/favicon.svg b/documentation/docs/theme/favicon.svg index d3343977ed..c32ec9595a 100644 --- a/documentation/docs/theme/favicon.svg +++ b/documentation/docs/theme/favicon.svg @@ -1,6 +1 @@ - - - - - - + \ No newline at end of file diff --git a/documentation/specs/src/base-ledger/execution.md b/documentation/specs/src/base-ledger/execution.md index 26cfe65f22..3395ffb858 100644 --- a/documentation/specs/src/base-ledger/execution.md +++ b/documentation/specs/src/base-ledger/execution.md @@ -21,7 +21,7 @@ Supported validity predicates for Namada: - Proof-of-stake (see [spec](../economics/proof-of-stake.md)) - IBC & IbcToken (see [spec](../interoperability/ibc.md)) - Governance (see [spec](./governance.md)) - - Treasury (see [spec](./governance.md#TreasuryAddress)) + - SlashFund (see [spec](./governance.md#SlashFundAddress)) - Protocol parameters - WASM - Fungible token (see [spec](./fungible-token.md)) diff --git a/documentation/specs/src/base-ledger/governance.md b/documentation/specs/src/base-ledger/governance.md index 45b0aa0cb1..40c7281af9 100644 --- a/documentation/specs/src/base-ledger/governance.md +++ b/documentation/specs/src/base-ledger/governance.md @@ -7,7 +7,7 @@ Namada introduces a governance mechanism to propose and apply protocol changes w ### Governance Address Governance adds 2 internal addresses: - `GovernanceAddress` -- `TreasuryAddress` +- `SlashFundAddress` The first internal address contains all the proposals under its address space. The second internal address holds the funds of rejected proposals. @@ -16,18 +16,18 @@ The second internal address holds the funds of rejected proposals. Each proposal will be stored in a sub-key under the internal proposal address. The storage keys involved are: ``` -/\$GovernanceAddress/proposal/$id/content: Vec -/\$GovernanceAddress/proposal/$id/author: Address -/\$GovernanceAddress/proposal/$id/start_epoch: Epoch -/\$GovernanceAddress/proposal/$id/end_epoch: Epoch -/\$GovernanceAddress/proposal/$id/grace_epoch: Epoch -/\$GovernanceAddress/proposal/$id/proposal_code: Option> -/\$GovernanceAddress/proposal/$id/funds: u64 -/\$GovernanceAddress/proposal/epoch/$id: u64 +/\$GovernanceAddress/proposal/\$id/content: Vec +/\$GovernanceAddress/proposal/\$id/author: Address +/\$GovernanceAddress/proposal/\$id/start_epoch: Epoch +/\$GovernanceAddress/proposal/\$id/end_epoch: Epoch +/\$GovernanceAddress/proposal/\$id/grace_epoch: Epoch +/\$GovernanceAddress/proposal/\$id/proposal_code: Option> +/\$GovernanceAddress/proposal/\$id/funds: u64 +/\$GovernanceAddress/proposal/epoch/\$id: u64 ``` - `Author` address field will be used to credit the locked funds if the proposal is approved. -- `/$GovernanceAddress/proposal/$epoch/$id` is used for easing the ledger governance execution. `$epoch` refers to the same value as the on specific in the `grace_epoch` field. +- `/\$GovernanceAddress/proposal/\$epoch/\$id` is used for easing the ledger governance execution. `\$epoch` refers to the same value as the on specific in the `grace_epoch` field. - The `content` value should follow a standard format. 
We leverage a similar format to what is described in the [BIP2](https://github.com/bitcoin/bips/blob/master/bip-0002.mediawiki#bip-format-and-structure) document: ```json @@ -54,16 +54,15 @@ Each proposal will be stored in a sub-key under the internal proposal address. T /\$GovernanceAddress/max_proposal_content_size: u64 /\$GovernanceAddress/min_proposal_grace_epochs: u64 /\$GovernanceAddress/pending/\$proposal_id: u64 - ``` -`counter` is used to assign a unique, incremental ID to each proposal.\ -`min_proposal_fund` represents the minimum amount of locked tokens to submit a proposal.\ -`max_proposal_code_size` is the maximum allowed size (in bytes) of the proposal wasm code.\ -`min_proposal_period` sets the minimum voting time window (in `Epoch`).\ -`max_proposal_content_size` tells the maximum number of characters allowed in the proposal content.\ -`min_proposal_grace_epochs` is the minimum required time window (in `Epoch`) between `end_epoch` and the epoch in which the proposal has to be executed. -`/$GovernanceAddress/pending/$proposal_id` this storage key is written only before the execution of the code defined in `/$GovernanceAddress/proposal/$id/proposal_code` and deleted afterwards. Since this storage key can be written only by the protocol itself (and by no other means), VPs can check for the presence of this storage key to be sure that a proposal_code has been executed by the protocol and not by a transaction. +- `counter` is used to assign a unique, incremental ID to each proposal.\ +- `min_proposal_fund` represents the minimum amount of locked tokens to submit a proposal.\ +- `max_proposal_code_size` is the maximum allowed size (in bytes) of the proposal wasm code.\ +- `min_proposal_period` sets the minimum voting time window (in `Epoch`).\ +- `max_proposal_content_size` tells the maximum number of characters allowed in the proposal content.\ +- `min_proposal_grace_epochs` is the minimum required time window (in `Epoch`) between `end_epoch` and the epoch in which the proposal has to be executed. +- `/\$GovernanceAddress/pending/\$proposal_id` this storage key is written only before the execution of the code defined in `/\$GovernanceAddress/proposal/\$id/proposal_code` and deleted afterwards. Since this storage key can be written only by the protocol itself (and by no other means), VPs can check for the presence of this storage key to be sure that a proposal_code has been executed by the protocol and not by a transaction. The governance machinery also relies on a subkey stored under the `NAM` token address: @@ -167,7 +166,7 @@ All the computation above must be made at the epoch specified in the `start_epoc It is possible to check the actual implementation [here](https://github.com/anoma/namada/blob/master/shared/src/ledger/governance/utils.rs#L68). ### Refund and Proposal Execution mechanism -Together with tallying, in the first block at the beginning of each epoch, in the `FinalizeBlock` event, the protocol will manage the execution of accepted proposals and refunding. For each ended proposal with a positive outcome, it will refund the locked funds from `GovernanceAddress` to the proposal author address (specified in the proposal `author` field). For each proposal that has been rejected, instead, the locked funds will be moved to the `TreasuryAddress`. Moreover, if the proposal had a positive outcome and `proposal_code` is defined, these changes will be executed right away. 
+Together with tallying, in the first block at the beginning of each epoch, in the `FinalizeBlock` event, the protocol will manage the execution of accepted proposals and refunding. For each ended proposal with a positive outcome, it will refund the locked funds from `GovernanceAddress` to the proposal author address (specified in the proposal `author` field). For each proposal that has been rejected, instead, the locked funds will be moved to the `SlashFundAddress`. Moreover, if the proposal had a positive outcome and `proposal_code` is defined, these changes will be executed right away. To summarize the execution of governance in the `FinalizeBlock` event: If the proposal outcome is positive and current epoch is equal to the proposal `grace_epoch`, in the `FinalizeBlock` event: @@ -175,33 +174,28 @@ If the proposal outcome is positive and current epoch is equal to the proposal ` - execute any changes specified by `proposal_code` In case the proposal was rejected or if any error, in the `FinalizeBlock` event: -- transfer the locked funds to `TreasuryAddress` +- transfer the locked funds to `SlashFundAddress` The result is then signaled by creating and inserting a [`Tendermint Event`](https://github.com/tendermint/tendermint/blob/ab0835463f1f89dcadf83f9492e98d85583b0e71/docs/spec/abci/abci.md#events. -## TreasuryAddress -Funds locked in `TreasuryAddress` address should be spendable only by proposals. +## SlashFundAddress +Funds locked in `SlashFundAddress` address should be spendable only by proposals. -### TreasuryAddress storage +### SlashFundAddress storage ``` -/\$TreasuryAddress/max_transferable_fund: u64 -/\$TreasuryAddress/?: Vec +/\$SlashFundAddress/?: Vec ``` The funds will be stored under: ``` -/\$NAMAddress/balance/\$TreasuryAddress: u64 +/\$NAMAddress/balance/\$SlashFundAddress: u64 ``` -### TreasuryAddress VP -The treasury validity predicate will approve a transfer only if: -- the transfer has been made by the protocol (by checking the existence of `/$GovernanceAddress/pending/$proposal_id` storage key) -- the transfered amount is <= `MAX_SPENDABLE_SUM` - -`MAX_SPENDABLE_SUM` is a parameter of the treasury native vp. +### SlashFundAddress VP +The slash_fund validity predicate will approve a transfer only if the transfer has been made by the protocol (by checking the existence of `/\$GovernanceAddress/pending/\$proposal_id` storage key) -It is possible to check the actual implementation [here](https://github.com/anoma/namada/blob/master/shared/src/ledger/treasury/mod.rs#L55). +It is possible to check the actual implementation [here](https://github.com/anoma/namada/blob/main/shared/src/ledger/slash_fund/mod.rs#L70). ## Off-chain protocol @@ -241,4 +235,4 @@ Same mechanism as [on chain](#tally) tally but instead of reading the data from ## Interfaces - Ledger CLI -- Wallet \ No newline at end of file +- Wallet diff --git a/documentation/specs/src/economics/inflation-system.md b/documentation/specs/src/economics/inflation-system.md index 7570693b73..921e78d4e5 100644 --- a/documentation/specs/src/economics/inflation-system.md +++ b/documentation/specs/src/economics/inflation-system.md @@ -4,7 +4,7 @@ The Namada protocol controls the Namada token NAM (the native staking token), wh ### Proof-of-stake rewards -The security of the proof-of-stake voting power allocation mechanism used by Namada is depenedent in part upon locking (bonding) tokens to validators, where these tokens can be slashed should the validators misbehave. 
Funds so locked are only able to be withdrawn after an unbonding period. In order to reward validators and delegators for locking their stake and participating in the consensus mechanism, Namada pays a variable amount of inflation to all delegators and validators. The amount of inflation paid is varied on a PD-controller in order to target a particular bonding ratio (fraction of the NAM token being locked in proof-of-stake). Namada targets a bonding ratio of 2/3, paying up to 10% inflation per annum to proof-of-stake rewards. See [reward distribution mechanism](./proof-of-stake/reward-distribution.md) for details. +The security of the proof-of-stake voting power allocation mechanism used by Namada is dependent in part upon locking (bonding) tokens to validators, where these tokens can be slashed should the validators misbehave. Funds so locked are only able to be withdrawn after an unbonding period. In order to reward validators and delegators for locking their stake and participating in the consensus mechanism, Namada pays a variable amount of inflation to all delegators and validators. The amount of inflation paid is varied on a PD-controller in order to target a particular bonding ratio (fraction of the NAM token being locked in proof-of-stake). Namada targets a bonding ratio of 2/3, paying up to 10% inflation per annum to proof-of-stake rewards. See [reward distribution mechanism](./proof-of-stake/reward-distribution.md) for details. ### Shielded pool rewards @@ -20,61 +20,61 @@ Inflation is calculated and paid per-epoch as follows. First, we start with the following fixed (governance-alterable) parameters: -- $Cap_{PoS}$ is the cap of proof-of-stake reward rate, in units of percent per annum -- $Cap_{SP-A}$ is the cap of shielded pool reward rate for each asset $A$, in units of percent per annum -- $R_{PGF}$ is the public goods funding reward rate, in units of percent per annum -- $R_{PoS-Target}$ is the target staking ratio (genesis default 2/3) -- $R_{SP-A-Target}$ is the target amount of asset $A$ locked in the shielded pool (separate value for each asset $A$) -- $EpochsPerYear$ is the number of epochs per year (genesis default 365) -- ${KP}_{PoS}$ is the proportional gain of the proof-of-stake PD controller, as a fraction of the total input range -- ${KD}_{PoS}$ is the derivative gain of the proof-of-stake PD controller, as a fraction of the total input range -- ${KP}_{SP_A}$ is the proportional gain of the shielded pool reward controller for asset $A$, as a fraction of the total input range (separate value for each asset $A$) -- ${KD}_{SP_A}$ is the derivative gain of the shielded pool reward controller for asset $A$, as a fraction of the total input range (separate value for each asset $A$) +- $Cap_{PoS}$ is the cap of proof-of-stake reward rate, in units of percent per annum (genesis default: 10%) +- $Cap_{SP_A}$ is the cap of shielded pool reward rate for each asset $A$, in units of percent per annum +- $\lambda_{PGF}$ is the public goods funding reward rate, in units of percent per annum +- $R_{PoS-target}$ is the target staking ratio (genesis default: 2/3) +- $R_{SP_A-target}$ is the target amount of asset $A$ locked in the shielded pool (separate value for each asset $A$) +- $EpochsPerYear$ is the number of epochs per year (genesis default: 365) +- ${KP}_{PoS-nom}$ is the nominal proportional gain of the proof-of-stake PD controller, as a fraction of the total input range +- ${KD}_{PoS-nom}$ is the nominal derivative gain of the proof-of-stake PD controller, as a fraction 
of the total input range +- ${KP}_{SP_A-nom}$ is the nominal proportional gain of the shielded pool reward controller for asset $A$, as a fraction of the total input range (separate value for each asset $A$) +- ${KD}_{SP_A-nom}$ is the nominal derivative gain of the shielded pool reward controller for asset $A$, as a fraction of the total input range (separate value for each asset $A$) Second, we take as input the following state values: - $S_{NAM}$ is the current supply of NAM -- $L_{NAM}$ is the current amount of NAM locked in proof-of-stake -- $I_{PoS}$ is the current proof-of-stake reward rate, in units of tokens per epoch -- $E_{PoS-last}$ is the error in proof-of-stake lock ratio (stored from the past epoch) +- $L_{PoS}$ is the current amount of NAM locked in proof-of-stake +- $I_{PoS}$ is the current proof-of-stake inflation amount, in units of tokens per epoch +- $R_{PoS-last}$ is the proof-of-stake locked token ratio from the previous epoch - $L_{SP_A}$ is the current amount of asset $A$ locked in the shielded pool (separate value for each asset $A$) -- $I_{SP_A}$ is the current shielded pool reward rate for asset $A$, in units of tokens per epoch -- $E_{SP_A-last}$ is the error in shielded pool lock amount for asset $A$ (stored from the past epoch) (separate value for each asset $A$) +- $I_{SP_A}$ is the current shielded pool inflation amount for asset $A$, in units of tokens per epoch +- $R_{SP_A-last}$ is the shielded pool locked token ratio for asset $A$ from the previous epoch (separate value for each asset $A$) -Public goods funding inflation can be calculated and paid immediately: +Public goods funding inflation can be calculated and paid immediately (in terms of total tokens per epoch): -- $I_{PGF} := R_{PGF} * S_{NAM} / EpochsPerYear$ +- $I_{PGF} = \lambda_{PGF} * S_{NAM} / EpochsPerYear$ These tokens are distributed to the public goods funding validity predicate. 
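For a rough sense of scale (purely illustrative numbers, not protocol values): with $S_{NAM} = 10^9$, $\lambda_{PGF} = 1\%$ and $EpochsPerYear = 365$, the public goods funding inflation would be $I_{PGF} = 0.01 \cdot 10^9 / 365 \approx 27{,}397$ tokens per epoch.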
To run the PD-controllers for proof-of-stake and shielded pool rewards, we first calculate some intermediate values: -- Calculate the staking ratio $R_{PoS}$ as $L_{NAM} / S_{NAM}$ -- Calculate the per-epoch cap on proof-of-stake and shielded pool reward rates - - $Cap_{PoS-Epoch} := S_{NAM} * Cap_{PoS} / EpochsPerYear$ - - $Cap_{SP_A-Epoch} := S_{NAM} * Cap_{SP_A} / EpochsPerYear$ (separate value for each $A$) -- Calculate PD-controller constants - - ${KP}_{PoS} := {KP}_{PoS} * Cap_{PoS-Epoch}$ - - ${KD}_{PoS} := {KD}_{PoS} * Cap_{PoS-Epoch}$ - - ${KP}_{SP_A} := {KP}_{SP_A} * Cap_{SP_A-Epoch}$ - - ${KD}_{SP_A} := {KD}_{SP_A} * Cap_{SP_A-Epoch}$ +- Calculate the latest staking ratio $R_{PoS}$ as $L_{PoS} / S_{NAM}$ +- Calculate the per-epoch cap on the proof-of-stake and shielded pool token inflation + - $Cap_{PoS-Epoch} = S_{NAM} * Cap_{PoS} / EpochsPerYear$ + - $Cap_{SP_A-Epoch} = S_{NAM} * Cap_{SP_A} / EpochsPerYear$ (separate value for each $A$) +- Calculate PD-controller constants to be used for this epoch + - ${KP}_{PoS} = {KP}_{PoS-nom} * Cap_{PoS-Epoch}$ + - ${KD}_{PoS} = {KD}_{PoS-nom} * Cap_{PoS-Epoch}$ + - ${KP}_{SP_A} = {KP}_{SP_A-nom} * Cap_{SP_A-Epoch}$ + - ${KD}_{SP_A} = {KD}_{SP_A-nom} * Cap_{SP_A-Epoch}$ Then, for proof-of-stake first, run the PD-controller: -- Calculate the error $E_{PoS} := R_{PoS-Target} - R_{PoS}$ -- Calculate the error derivative $E'_{PoS} := E_{PoS} - E_{PoS-last}$ -- Calculate the control value $C_{PoS} := (KP_{PoS} * E_{PoS}) - (KD_{PoS} * E'_{PoS})$ -- Calculate the new $I_{PoS} := max(0, min(I_{PoS} + C_{PoS}, Cap_{PoS}))$ +- Calculate the error $E_{PoS} = R_{PoS-target} - R_{PoS}$ +- Calculate the error derivative $E'_{PoS} = E_{PoS} - E_{PoS-last} = R_{PoS-last} - R_{PoS}$ +- Calculate the control value $C_{PoS} = (KP_{PoS} * E_{PoS}) - (KD_{PoS} * E'_{PoS})$ +- Calculate the new $I'_{PoS} = max(0, min(I_{PoS} + C_{PoS}, Cap_{PoS-Epoch}))$ These tokens are distributed to the proof-of-stake reward distribution validity predicate. Similarly, for each asset $A$ for which shielded pool rewards are being paid: -- Calculate the error $E_{SP_A} := L_{SP_A-Target} - L_{SP_A}$ -- Calculate the error derivative $E'_{SP_A} := E_{SP-A} - E_{SP_A-last}$ -- Calculate the control value $C_{SP_A} := (KP_{SP_A} * E_{SP_A}) - (KD_{SP_A} * E'{SP_A})$ -- Calculate the new $I_{SP_A} := max(0, min(I_{SP_A} + C_{SP_A}, Cap_{SP_A-Epoch}))$ +- Calculate the error $E_{SP_A} = R_{SP_A-target} - R_{SP_A}$ +- Calculate the error derivative $E'_{SP_A} = E_{SP_A} - E_{SP_A-last} = R_{SP_A-last} - R_{SP_A}$ +- Calculate the control value $C_{SP_A} = (KP_{SP_A} * E_{SP_A}) - (KD_{SP_A} * E'_{SP_A})$ +- Calculate the new $I'_{SP_A} = max(0, min(I_{SP_A} + C_{SP_A}, Cap_{SP_A-Epoch}))$ These tokens are distributed to the shielded pool reward distribution validity predicate. -Finally, we store the current inflation and error values for the next controller round. \ No newline at end of file +Finally, we store the latest inflation and locked token ratio values for the next epoch's controller round. 
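The per-epoch controller step above can be summarised in a few lines. The following is a minimal sketch, assuming plain `f64` arithmetic and hypothetical struct/field names purely for illustration; the ledger itself operates on its own token amount and decimal types:

```rust
/// Illustrative per-epoch PD-controller step for proof-of-stake inflation.
/// Ratios and gains are fractions, token quantities are raw amounts.
struct PosController {
    r_target: f64,        // R_PoS-target, e.g. 2/3
    kp_nom: f64,          // KP_PoS-nom, as a fraction of the input range
    kd_nom: f64,          // KD_PoS-nom, as a fraction of the input range
    cap_per_annum: f64,   // Cap_PoS, e.g. 0.10 for 10% per annum
    epochs_per_year: f64, // EpochsPerYear, e.g. 365
}

impl PosController {
    /// Compute the new per-epoch inflation I'_PoS from the current supply,
    /// the amount locked in PoS, last epoch's locked ratio and the current
    /// per-epoch inflation.
    fn next_inflation(&self, s_nam: f64, l_pos: f64, r_last: f64, i_pos: f64) -> f64 {
        let cap_epoch = s_nam * self.cap_per_annum / self.epochs_per_year;
        let kp = self.kp_nom * cap_epoch;
        let kd = self.kd_nom * cap_epoch;

        let r = l_pos / s_nam;         // current staking ratio R_PoS
        let error = self.r_target - r; // E_PoS
        let error_d = r_last - r;      // E'_PoS = E_PoS - E_PoS-last
        let control = kp * error - kd * error_d;

        // I'_PoS = max(0, min(I_PoS + C_PoS, Cap_PoS-Epoch))
        (i_pos + control).clamp(0.0, cap_epoch)
    }
}
```

The shielded pool controller for each asset $A$ follows the same shape, with the locked ratio of $A$ and its own cap and gains in place of the staking ratio.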
\ No newline at end of file diff --git a/encoding_spec/Cargo.toml b/encoding_spec/Cargo.toml index 428db752b7..e6ca933d05 100644 --- a/encoding_spec/Cargo.toml +++ b/encoding_spec/Cargo.toml @@ -6,7 +6,7 @@ license = "GPL-3.0" name = "namada_encoding_spec" readme = "../README.md" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = [] diff --git a/genesis/dev.toml b/genesis/dev.toml index 4c48bcc279..29a4b4d791 100644 --- a/genesis/dev.toml +++ b/genesis/dev.toml @@ -114,11 +114,6 @@ address = "atest1v4ehgw36x3qng3jzggu5yvpsxgcngv2xgguy2dpkgvu5x33kx3pr2w2zgep5xwf public_key = "d06f8d4f897f329a50fd23ba5d2503bbe22fab2f14d5f625e07a65f617eb2778" vp = "vp_user" -[established.matchmaker] -address = "atest1v4ehgw36x5mnswphx565gv2yxdprzvf5gdp523jpxy6rvv6zxaznzsejxeznzseh8pp5ywz93xwala" -public_key = "f4fe03b0d3130f077e4d51cc7748baac998750476bef994a0a73ac4e7d183168" -vp = "vp_user" - # An implicit account present at genesis. # Daewon (a1qyqzsqqqqqcyvvf5xcu5vd6rg4z5233hg9pn23pjgdryzdjy8pz52wzxxscnvvjxx3rryvzz8y5p6mtz) @@ -177,12 +172,11 @@ light_client_attack_slash_rate = 500 min_proposal_fund = 500 # proposal code size in bytes max_proposal_code_size = 300000 -# proposal period length in epoch +# min proposal period length in epochs min_proposal_period = 3 +# max proposal period length in epochs +max_proposal_period = 27 # maximum number of characters in the proposal content max_proposal_content_size = 5000 # minimum epochs between end and grace epoch min_proposal_grace_epochs = 6 - -[treasury_params] -max_proposal_fund_transfer = 10000 \ No newline at end of file diff --git a/genesis/e2e-tests-single-node.toml b/genesis/e2e-tests-single-node.toml index 96fd787ca2..0e3a6d3fc8 100644 --- a/genesis/e2e-tests-single-node.toml +++ b/genesis/e2e-tests-single-node.toml @@ -1,6 +1,5 @@ # Genesis configuration source for E2E tests with: -# - 1 genesis validator and intent # gossip nodes -# - a matchmaker configured on the first validator node +# - 1 genesis validator # - User accounts same as the ones in "dev" build (Albert, Bertha, Christel) genesis_time = "2021-09-30T10:00:00Z" @@ -18,13 +17,6 @@ staking_reward_vp = "vp_user" # We set the port to be the default+1000, so that if a local node was running at # the same time as the E2E tests, it wouldn't affect them. net_address = "127.0.0.1:27656" -# This has to be an alias of one of the established accounts -matchmaker_account = "matchmaker" -# A matchmaker dylib program's name (the platform specific extension -# `(dll|dylib|so)` is added by Anoma) -matchmaker_code = "libmm_token_exch" -# A transaction WASM code used by the matchmaker -matchmaker_tx = "wasm/tx_from_intent.wasm" # Some tokens present at genesis. 
@@ -41,8 +33,6 @@ Christel = 1000000 Daewon = 1000000 faucet = 9223372036854 "faucet.public_key" = 100 -matchmaker = 1000000 -"matchmaker.public_key" = 1000 "validator-0.public_key" = 100 [token.BTC] @@ -110,9 +100,6 @@ faucet = 9223372036854 [established.faucet] vp = "vp_testnet_faucet" -[established.matchmaker] -vp = "vp_user" - [established.Albert] vp = "vp_user" @@ -185,12 +172,11 @@ light_client_attack_slash_rate = 500 min_proposal_fund = 500 # proposal code size in bytes max_proposal_code_size = 300000 -# proposal period length in epoch +# min proposal period length in epochs min_proposal_period = 3 +# max proposal period length in epochs +max_proposal_period = 27 # maximum number of characters in the proposal content max_proposal_content_size = 10000 # minimum epochs between end and grace epoch min_proposal_grace_epochs = 6 - -[treasury_params] -max_proposal_fund_transfer = 10000 diff --git a/macros/Cargo.toml b/macros/Cargo.toml index 19cb20fd9d..0061419311 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "namada_macros" resolver = "2" -version = "0.7.1" +version = "0.8.1" [lib] proc-macro = true diff --git a/macros/src/lib.rs b/macros/src/lib.rs index afa49c66ab..a91ab297b1 100644 --- a/macros/src/lib.rs +++ b/macros/src/lib.rs @@ -1,5 +1,5 @@ -//! Anoma macros for generating WASM binding code for transactions, validity -//! predicates and matchmaker. +//! Anoma macros for generating WASM binding code for transactions and validity +//! predicates. #![doc(html_favicon_url = "https://dev.anoma.net/master/favicon.png")] #![doc(html_logo_url = "https://dev.anoma.net/master/rustdoc-logo.png")] @@ -8,14 +8,17 @@ use proc_macro::TokenStream; use quote::quote; -use syn::{parse_macro_input, DeriveInput, ItemFn}; +use syn::{parse_macro_input, ItemFn}; /// Generate WASM binding for a transaction main entrypoint function. /// /// This macro expects a function with signature: /// /// ```compiler_fail -/// fn apply_tx(tx_data: Vec) +/// fn apply_tx( +/// ctx: &mut Ctx, +/// tx_data: Vec +/// ) -> TxResult /// ``` #[proc_macro_attribute] pub fn transaction(_attr: TokenStream, input: TokenStream) -> TokenStream { @@ -38,7 +41,19 @@ pub fn transaction(_attr: TokenStream, input: TokenStream) -> TokenStream { ) }; let tx_data = slice.to_vec(); - #ident(tx_data); + + // The context on WASM side is only provided by the VM once its + // being executed (in here it's implicit). But because we want to + // have interface consistent with the VP interface, in which the + // context is explicit, in here we're just using an empty `Ctx` + // to "fake" it. + let mut ctx = unsafe { namada_tx_prelude::Ctx::new() }; + + if let Err(err) = #ident(&mut ctx, tx_data) { + namada_tx_prelude::debug_log!("Transaction error: {}", err); + // crash the transaction to abort + panic!(); + } } }; TokenStream::from(gen) @@ -50,11 +65,12 @@ pub fn transaction(_attr: TokenStream, input: TokenStream) -> TokenStream { /// /// ```compiler_fail /// fn validate_tx( +/// ctx: &Ctx, /// tx_data: Vec, /// addr: Address, /// keys_changed: BTreeSet, /// verifiers: BTreeSet
-/// ) -> bool +/// ) -> VpResult /// ``` #[proc_macro_attribute] pub fn validity_predicate( @@ -74,7 +90,6 @@ pub fn validity_predicate( #[no_mangle] extern "C" fn _validate_tx( // VP's account's address - // TODO Should the address be on demand (a call to host function?) addr_ptr: u64, addr_len: u64, tx_data_ptr: u64, @@ -113,122 +128,22 @@ pub fn validity_predicate( }; let verifiers: BTreeSet
= BTreeSet::try_from_slice(slice).unwrap(); - // run validation with the concrete type(s) - if #ident(tx_data, addr, keys_changed, verifiers) { - 1 - } else { - 0 - } - } - }; - TokenStream::from(gen) -} - -/// Derive dynamic library binding for a matchmaker implementation. -/// -/// This macro requires that the data structure implements -/// [`std::default::Default`] that is used to instantiate the matchmaker and -/// `namada::types::matchmaker::AddIntent` to implement a custom matchmaker -/// algorithm. -/// -/// # Examples -/// -/// ```compiler_fail -/// use namada::types::matchmaker::AddIntent; -/// use namada_macros::Matchmaker; -/// -/// #[derive(Default, Matchmaker)] -/// struct Matchmaker; -/// -/// impl AddIntent for Matchmaker { -/// } -/// ``` -#[proc_macro_derive(Matchmaker)] -pub fn matchmaker(input: TokenStream) -> TokenStream { - let ast = parse_macro_input!(input as DeriveInput); - let ident = &ast.ident; - // Print out the original AST and add add_intent implementation and binding - let gen = quote! { - - /// Add the marker trait - #[automatically_derived] - impl namada::types::matchmaker::Matchmaker for #ident {} - - /// Instantiate a new matchmaker and return a pointer to it. The caller is - /// responsible for making sure that the memory of the pointer will be dropped, - /// which can be done by calling the `_drop_matchmaker` function. - #[no_mangle] - #[automatically_derived] - fn _new_matchmaker() -> *mut std::ffi::c_void { - let state = Box::new(#ident::default()); - let state_ptr = Box::into_raw(state) as *mut std::ffi::c_void; - state_ptr - } - - /// Drop the matchmaker's state to reclaim its memory - #[no_mangle] - #[automatically_derived] - fn _drop_matchmaker(state_ptr: *mut std::ffi::c_void) { - // The state will be dropped on going out of scope - let _state = unsafe { Box::from_raw(state_ptr as *mut #ident) }; - } - - /// Ask the matchmaker to process a new intent - #[allow(clippy::ptr_arg)] - #[no_mangle] - #[automatically_derived] - fn _add_intent( - state_ptr: *mut std::ffi::c_void, - intent_id: &Vec, - intent_data: &Vec, - ) -> namada::types::matchmaker::AddIntentResult { - let state_ptr = state_ptr as *mut #ident; - let mut state: #ident = unsafe { std::ptr::read(state_ptr) }; - let result = state.add_intent(intent_id, intent_data); - unsafe { std::ptr::write(state_ptr, state) }; - result - } - }; - TokenStream::from(gen) -} + // The context on WASM side is only provided by the VM once its + // being executed (in here it's implicit). But because we want to + // have interface identical with the native VPs, in which the + // context is explicit, in here we're just using an empty `Ctx` + // to "fake" it. + let ctx = unsafe { namada_vp_prelude::Ctx::new() }; -/// Generate WASM binding for matchmaker filter main entrypoint function. -/// -/// This macro expects a function with signature: -/// -/// ```compiler_fail -/// fn validate_intent(intent: Vec) -> bool -/// ``` -#[proc_macro_attribute] -pub fn filter(_attr: TokenStream, input: TokenStream) -> TokenStream { - let ast = parse_macro_input!(input as ItemFn); - let ident = &ast.sig.ident; - let gen = quote! { - // Use `wee_alloc` as the global allocator. 
- #[global_allocator] - static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; - - #ast - - /// The module interface callable by wasm runtime - #[no_mangle] - extern "C" fn _validate_intent( - intent_data_ptr: u64, - intent_data_len: u64, - ) -> u64 { - let get_data = |ptr, len| { - let slice = unsafe { - core::slice::from_raw_parts(ptr as *const u8, len as _) - }; - slice.to_vec() - }; - - if #ident( - get_data(intent_data_ptr, intent_data_len), - ) { - 0 - } else { - 1 + // run validation with the concrete type(s) + match #ident(&ctx, tx_data, addr, keys_changed, verifiers) + { + Ok(true) => 1, + Ok(false) => 0, + Err(err) => { + namada_vp_prelude::debug_log!("Validity predicate error: {}", err); + 0 + }, } } }; diff --git a/proof_of_stake/Cargo.toml b/proof_of_stake/Cargo.toml index dbc3de0061..82522bc3d6 100644 --- a/proof_of_stake/Cargo.toml +++ b/proof_of_stake/Cargo.toml @@ -6,7 +6,7 @@ license = "GPL-3.0" name = "namada_proof_of_stake" readme = "../README.md" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = [] @@ -18,5 +18,6 @@ borsh = "0.9.1" thiserror = "1.0.30" # A fork with state machine testing proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm", optional = true} +derivative = "2.2.0" [dev-dependencies] diff --git a/proof_of_stake/src/epoched.rs b/proof_of_stake/src/epoched.rs index 29137b49bd..f13bec3ee0 100644 --- a/proof_of_stake/src/epoched.rs +++ b/proof_of_stake/src/epoched.rs @@ -11,7 +11,17 @@ use crate::PosParams; /// Data that may have values set for future epochs, up to an epoch at offset as /// set via the `Offset` type parameter. -#[derive(Debug, Clone, BorshDeserialize, BorshSerialize, BorshSchema)] +#[derive( + Debug, + Clone, + BorshDeserialize, + BorshSerialize, + BorshSchema, + PartialEq, + Eq, + PartialOrd, + Ord, +)] pub struct Epoched where Data: Clone + BorshDeserialize + BorshSerialize + BorshSchema, @@ -27,7 +37,17 @@ where /// Data that may have delta values (a difference from the predecessor epoch) /// set for future epochs, up to an epoch at offset as set via the `Offset` type /// parameter. -#[derive(Debug, Clone, BorshDeserialize, BorshSerialize, BorshSchema)] +#[derive( + Debug, + Clone, + BorshDeserialize, + BorshSerialize, + BorshSchema, + PartialEq, + Eq, + PartialOrd, + Ord, +)] pub struct EpochedDelta where Data: Clone @@ -56,7 +76,17 @@ pub trait EpochOffset: } /// Offset at pipeline length. -#[derive(Debug, Clone, BorshDeserialize, BorshSerialize, BorshSchema)] +#[derive( + Debug, + Clone, + BorshDeserialize, + BorshSerialize, + BorshSchema, + PartialEq, + Eq, + PartialOrd, + Ord, +)] pub struct OffsetPipelineLen; impl EpochOffset for OffsetPipelineLen { fn value(params: &PosParams) -> u64 { @@ -69,9 +99,19 @@ impl EpochOffset for OffsetPipelineLen { } /// Offset at unbonding length. -#[derive(Debug, Clone, BorshDeserialize, BorshSerialize, BorshSchema)] -pub struct OffsetUnboundingLen; -impl EpochOffset for OffsetUnboundingLen { +#[derive( + Debug, + Clone, + BorshDeserialize, + BorshSerialize, + BorshSchema, + PartialEq, + Eq, + PartialOrd, + Ord, +)] +pub struct OffsetUnbondingLen; +impl EpochOffset for OffsetUnbondingLen { fn value(params: &PosParams) -> u64 { params.unbonding_len } @@ -82,7 +122,7 @@ impl EpochOffset for OffsetUnboundingLen { } /// Offset length dynamic choice. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum DynEpochOffset { /// Offset at pipeline length. 
PipelineLen, @@ -522,21 +562,20 @@ where ); } - /// Update the delta values in reverse order (starting from the future-most - /// epoch) while the update function returns `true`. - pub fn rev_update_while( - &mut self, - mut update_value: impl FnMut(&mut Data, Epoch) -> bool, + /// Apply the given `f` function on each delta value in reverse order + /// (starting from the future-most epoch) while the given function returns + /// `true`. + pub fn rev_while( + &self, + mut f: impl FnMut(&Data, Epoch) -> bool, current_epoch: impl Into, params: &PosParams, ) { let epoch = current_epoch.into(); - self.update_data(epoch, params); - let offset = Offset::value(params) as usize; for ix in (0..offset + 1).rev() { - if let Some(Some(current)) = self.data.get_mut(ix) { - let keep_going = update_value(current, epoch + ix); + if let Some(Some(current)) = self.data.get(ix) { + let keep_going = f(current, epoch + ix); if !keep_going { break; } @@ -569,16 +608,16 @@ mod tests { sequential 1..20 => EpochedAbstractStateMachine); #[test] - fn epoched_state_machine_with_unbounding_offset( - sequential 1..20 => EpochedAbstractStateMachine); + fn epoched_state_machine_with_unbonding_offset( + sequential 1..20 => EpochedAbstractStateMachine); #[test] fn epoched_delta_state_machine_with_pipeline_offset( sequential 1..20 => EpochedDeltaAbstractStateMachine); #[test] - fn epoched_delta_state_machine_with_unbounding_offset( - sequential 1..20 => EpochedDeltaAbstractStateMachine); + fn epoched_delta_state_machine_with_unbonding_offset( + sequential 1..20 => EpochedDeltaAbstractStateMachine); } /// Abstract representation of [`Epoched`]. diff --git a/proof_of_stake/src/lib.rs b/proof_of_stake/src/lib.rs index a7bc5b9e9f..d23450b74a 100644 --- a/proof_of_stake/src/lib.rs +++ b/proof_of_stake/src/lib.rs @@ -65,7 +65,7 @@ pub trait PosReadOnly { + Copy + Add + AddAssign - + Sub + + Sub + PartialOrd + Into + From @@ -95,58 +95,68 @@ pub trait PosReadOnly { + BorshSerialize + BorshSchema; + /// Underlying read (and write in [`PosActions`]) interface errors + type Error; + /// Address of the PoS account const POS_ADDRESS: Self::Address; + /// Address of the staking token /// TODO: this should be `const`, but in the ledger `address::xan` is not a /// `const fn` fn staking_token_address() -> Self::Address; /// Read PoS parameters. - fn read_pos_params(&self) -> PosParams; + fn read_pos_params(&self) -> Result; /// Read PoS validator's staking reward address. fn read_validator_staking_reward_address( &self, key: &Self::Address, - ) -> Option; + ) -> Result, Self::Error>; /// Read PoS validator's consensus key (used for signing block votes). fn read_validator_consensus_key( &self, key: &Self::Address, - ) -> Option>; + ) -> Result>, Self::Error>; /// Read PoS validator's state. fn read_validator_state( &self, key: &Self::Address, - ) -> Option; + ) -> Result, Self::Error>; /// Read PoS validator's total deltas of their bonds (validator self-bonds /// and delegations). fn read_validator_total_deltas( &self, key: &Self::Address, - ) -> Option>; + ) -> Result>, Self::Error>; /// Read PoS validator's voting power. fn read_validator_voting_power( &self, key: &Self::Address, - ) -> Option; + ) -> Result, Self::Error>; /// Read PoS slashes applied to a validator. - fn read_validator_slashes(&self, key: &Self::Address) -> Vec; + fn read_validator_slashes( + &self, + key: &Self::Address, + ) -> Result, Self::Error>; /// Read PoS bond (validator self-bond or a delegation). 
fn read_bond( &self, key: &BondId, - ) -> Option>; + ) -> Result>, Self::Error>; /// Read PoS unbond (unbonded tokens from validator self-bond or a /// delegation). fn read_unbond( &self, key: &BondId, - ) -> Option>; + ) -> Result>, Self::Error>; /// Read PoS validator set (active and inactive). - fn read_validator_set(&self) -> ValidatorSets; + fn read_validator_set( + &self, + ) -> Result, Self::Error>; /// Read PoS total voting power of all validators (active and inactive). - fn read_total_voting_power(&self) -> TotalVotingPowers; + fn read_total_voting_power(&self) + -> Result; /// Read PoS validator's Eth bridge governance key fn read_validator_eth_cold_key( @@ -164,78 +174,110 @@ pub trait PosReadOnly { /// PoS system trait to be implemented in integration that can read and write /// PoS data. pub trait PosActions: PosReadOnly { + /// Error in `PosActions::become_validator` + type BecomeValidatorError: From + + From>; + + /// Error in `PosActions::bond_tokens` + type BondError: From + From>; + + /// Error in `PosActions::unbond_tokens` + type UnbondError: From + + From>; + + /// Error in `PosActions::withdraw_tokens` + type WithdrawError: From + From>; + /// Write PoS parameters. - fn write_pos_params(&mut self, params: &PosParams); - /// Write PoS validator's raw hash its address. - fn write_validator_address_raw_hash(&mut self, address: &Self::Address); + fn write_pos_params( + &mut self, + params: &PosParams, + ) -> Result<(), Self::Error>; + /// Write PoS validator's raw hash of its consensus key. + fn write_validator_address_raw_hash( + &mut self, + address: &Self::Address, + consensus_key: &Self::PublicKey, + ) -> Result<(), Self::Error>; /// Write PoS validator's staking reward address, into which staking rewards /// will be credited. fn write_validator_staking_reward_address( &mut self, key: &Self::Address, value: Self::Address, - ); + ) -> Result<(), Self::Error>; /// Write PoS validator's consensus key (used for signing block votes). fn write_validator_consensus_key( &mut self, key: &Self::Address, value: ValidatorConsensusKeys, - ); + ) -> Result<(), Self::Error>; + /// Write PoS validator's Eth bridge governance key + fn write_validator_eth_cold_key( + &mut self, + address: &Self::Address, + value: ValidatorEthKey, + ) -> Result<(), Self::Error>; + /// Write PoS validator's Eth validator set update signing key + fn write_validator_eth_hot_key( + &mut self, + address: &Self::Address, + value: ValidatorEthKey, + ) -> Result<(), Self::Error>; /// Write PoS validator's state. fn write_validator_state( &mut self, key: &Self::Address, value: ValidatorStates, - ); + ) -> Result<(), Self::Error>; /// Write PoS validator's total deltas of their bonds (validator self-bonds /// and delegations). fn write_validator_total_deltas( &mut self, key: &Self::Address, value: ValidatorTotalDeltas, - ); + ) -> Result<(), Self::Error>; /// Write PoS validator's voting power. fn write_validator_voting_power( &mut self, key: &Self::Address, value: ValidatorVotingPowers, - ); + ) -> Result<(), Self::Error>; /// Write PoS bond (validator self-bond or a delegation). fn write_bond( &mut self, key: &BondId, value: Bonds, - ); + ) -> Result<(), Self::Error>; /// Write PoS unbond (unbonded tokens from validator self-bond or a /// delegation). fn write_unbond( &mut self, key: &BondId, value: Unbonds, - ); + ) -> Result<(), Self::Error>; /// Write PoS validator set (active and inactive). 
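// --- Illustrative sketch, not part of the patch ---
// The `PosReadOnly`/`PosActions` refactor above makes every storage accessor
// fallible: reads now return `Result<Option<T>, Self::Error>` and writes
// return `Result<(), Self::Error>`, so callers propagate failures with `?`
// instead of panicking. The hypothetical trait and helper below (simplified
// `&str` bond IDs, `u64` amounts, a made-up `StorageError`) only demonstrate
// that calling pattern; they are not the real Namada types.
use std::collections::HashMap;

#[derive(Debug)]
struct StorageError(String);

trait PosRead {
    fn read_bond(&self, bond_id: &str) -> Result<Option<u64>, StorageError>;
}

// Same shape as e.g. `unbond_tokens` in the diff: `?` to surface read errors,
// then `ok_or_else` to turn a missing value into a domain error.
fn require_bond<S: PosRead>(pos: &S, bond_id: &str) -> Result<u64, StorageError> {
    pos.read_bond(bond_id)?
        .ok_or_else(|| StorageError(format!("no bond found for {bond_id}")))
}

// Tiny in-memory backend so the sketch runs end to end.
struct MemStore(HashMap<String, u64>);

impl PosRead for MemStore {
    fn read_bond(&self, bond_id: &str) -> Result<Option<u64>, StorageError> {
        Ok(self.0.get(bond_id).copied())
    }
}

fn main() {
    let store = MemStore([("alice/val".to_string(), 100_u64)].into_iter().collect());
    assert_eq!(require_bond(&store, "alice/val").unwrap(), 100);
    assert!(require_bond(&store, "bob/val").is_err());
}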
- fn write_validator_set(&mut self, value: ValidatorSets); + fn write_validator_set( + &mut self, + value: ValidatorSets, + ) -> Result<(), Self::Error>; /// Write PoS total voting power of all validators (active and inactive). - fn write_total_voting_power(&mut self, value: TotalVotingPowers); - /// Write PoS validator's Eth bridge governance key - fn write_validator_eth_cold_key( + fn write_total_voting_power( &mut self, - address: &Self::Address, - value: ValidatorEthKey, - ); - - /// Write PoS validator's Eth validator set update signing key - fn write_validator_eth_hot_key( - &self, - address: &Self::Address, - value: ValidatorEthKey, - ); + value: TotalVotingPowers, + ) -> Result<(), Self::Error>; /// Delete an emptied PoS bond (validator self-bond or a delegation). - fn delete_bond(&mut self, key: &BondId); + fn delete_bond( + &mut self, + key: &BondId, + ) -> Result<(), Self::Error>; /// Delete an emptied PoS unbond (unbonded tokens from validator self-bond /// or a delegation). - fn delete_unbond(&mut self, key: &BondId); + fn delete_unbond( + &mut self, + key: &BondId, + ) -> Result<(), Self::Error>; /// Transfer tokens from the `src` to the `dest`. fn transfer( @@ -244,7 +286,7 @@ pub trait PosActions: PosReadOnly { amount: Self::TokenAmount, src: &Self::Address, dest: &Self::Address, - ); + ) -> Result<(), Self::Error>; /// Attempt to update the given account to become a validator. fn become_validator( @@ -255,25 +297,24 @@ pub trait PosActions: PosReadOnly { eth_cold_key: &Self::PublicKey, eth_hot_key: &Self::PublicKey, current_epoch: impl Into, - ) -> Result<(), BecomeValidatorError> + ) -> Result<(), Self::BecomeValidatorError> where Self::PublicKey: TryRefTo, { let current_epoch = current_epoch.into(); - let params = self.read_pos_params(); - let mut validator_set = self.read_validator_set(); - if self.is_validator(address) { - return Err(BecomeValidatorError::AlreadyValidator( - address.clone(), - )); + let params = self.read_pos_params()?; + let mut validator_set = self.read_validator_set()?; + if self.is_validator(address)? 
{ + Err(BecomeValidatorError::AlreadyValidator(address.clone()))?; } if address == staking_reward_address { - return Err( + Err( BecomeValidatorError::StakingRewardAddressEqValidatorAddress( address.clone(), ), - ); + )?; } + let consensus_key_clone = consensus_key.clone(); let BecomeValidatorData { consensus_key, eth_cold_key, @@ -293,22 +334,26 @@ pub trait PosActions: PosReadOnly { self.write_validator_staking_reward_address( address, staking_reward_address.clone(), - ); - self.write_validator_consensus_key(address, consensus_key); - self.write_validator_eth_cold_key(address, eth_cold_key); - self.write_validator_eth_hot_key(address, eth_hot_key); - self.write_validator_state(address, state); - self.write_validator_set(validator_set); - self.write_validator_address_raw_hash(address); - self.write_validator_total_deltas(address, total_deltas); - self.write_validator_voting_power(address, voting_power); + )?; + self.write_validator_consensus_key(address, consensus_key)?; + self.write_validator_eth_cold_key(address, eth_cold_key)?; + self.write_validator_eth_hot_key(address, eth_hot_key)?; + self.write_validator_state(address, state)?; + self.write_validator_set(validator_set)?; + self.write_validator_address_raw_hash(address, &consensus_key_clone)?; + self.write_validator_total_deltas(address, total_deltas)?; + self.write_validator_voting_power(address, voting_power)?; Ok(()) } /// Check if the given address is a validator by checking that it has some /// state. - fn is_validator(&self, address: &Self::Address) -> bool { - self.read_validator_state(address).is_some() + fn is_validator( + &self, + address: &Self::Address, + ) -> Result { + let state = self.read_validator_state(address)?; + Ok(state.is_some()) } /// Self-bond tokens to a validator when `source` is `None` or equal to @@ -320,29 +365,27 @@ pub trait PosActions: PosReadOnly { validator: &Self::Address, amount: Self::TokenAmount, current_epoch: impl Into, - ) -> Result<(), BondError> { + ) -> Result<(), Self::BondError> { let current_epoch = current_epoch.into(); if let Some(source) = source { - if source != validator && self.is_validator(source) { - return Err(BondError::SourceMustNotBeAValidator( - source.clone(), - )); + if source != validator && self.is_validator(source)? 
{ + Err(BondError::SourceMustNotBeAValidator(source.clone()))?; } } - let params = self.read_pos_params(); - let validator_state = self.read_validator_state(validator); + let params = self.read_pos_params()?; + let validator_state = self.read_validator_state(validator)?; let source = source.unwrap_or(validator); let bond_id = BondId { source: source.clone(), validator: validator.clone(), }; - let bond = self.read_bond(&bond_id); + let bond = self.read_bond(&bond_id)?; let validator_total_deltas = - self.read_validator_total_deltas(validator); + self.read_validator_total_deltas(validator)?; let validator_voting_power = - self.read_validator_voting_power(validator); - let mut total_voting_power = self.read_total_voting_power(); - let mut validator_set = self.read_validator_set(); + self.read_validator_voting_power(validator)?; + let mut total_voting_power = self.read_total_voting_power()?; + let mut validator_set = self.read_validator_set()?; let BondData { bond, @@ -360,12 +403,11 @@ pub trait PosActions: PosReadOnly { &mut validator_set, current_epoch, )?; - - self.write_bond(&bond_id, bond); - self.write_validator_total_deltas(validator, validator_total_deltas); - self.write_validator_voting_power(validator, validator_voting_power); - self.write_total_voting_power(total_voting_power); - self.write_validator_set(validator_set); + self.write_bond(&bond_id, bond)?; + self.write_validator_total_deltas(validator, validator_total_deltas)?; + self.write_validator_voting_power(validator, validator_voting_power)?; + self.write_total_voting_power(total_voting_power)?; + self.write_validator_set(validator_set)?; // Transfer the bonded tokens from the source to PoS self.transfer( @@ -373,8 +415,7 @@ pub trait PosActions: PosReadOnly { amount, source, &Self::POS_ADDRESS, - ); - + )?; Ok(()) } @@ -387,28 +428,32 @@ pub trait PosActions: PosReadOnly { validator: &Self::Address, amount: Self::TokenAmount, current_epoch: impl Into, - ) -> Result<(), UnbondError> { + ) -> Result<(), Self::UnbondError> { let current_epoch = current_epoch.into(); - let params = self.read_pos_params(); + let params = self.read_pos_params()?; let source = source.unwrap_or(validator); let bond_id = BondId { source: source.clone(), validator: validator.clone(), }; - let mut bond = - self.read_bond(&bond_id).ok_or(UnbondError::NoBondFound)?; - let unbond = self.read_unbond(&bond_id); - let mut validator_total_deltas = - self.read_validator_total_deltas(validator).ok_or_else(|| { + let mut bond = match self.read_bond(&bond_id)? { + Some(val) => val, + None => Err(UnbondError::NoBondFound)?, + }; + let unbond = self.read_unbond(&bond_id)?; + let mut validator_total_deltas = self + .read_validator_total_deltas(validator)? + .ok_or_else(|| { UnbondError::ValidatorHasNoBonds(validator.clone()) })?; - let mut validator_voting_power = - self.read_validator_voting_power(validator).ok_or_else(|| { + let mut validator_voting_power = self + .read_validator_voting_power(validator)? 
+ .ok_or_else(|| { UnbondError::ValidatorHasNoVotingPower(validator.clone()) })?; - let slashes = self.read_validator_slashes(validator); - let mut total_voting_power = self.read_total_voting_power(); - let mut validator_set = self.read_validator_set(); + let slashes = self.read_validator_slashes(validator)?; + let mut total_voting_power = self.read_total_voting_power()?; + let mut validator_set = self.read_validator_set()?; let UnbondData { unbond } = unbond_tokens( ¶ms, @@ -431,18 +476,18 @@ pub trait PosActions: PosReadOnly { ); match total_bonds { Some(total_bonds) if total_bonds.sum() != 0.into() => { - self.write_bond(&bond_id, bond); + self.write_bond(&bond_id, bond)?; } _ => { // If the bond is left empty, delete it - self.delete_bond(&bond_id) + self.delete_bond(&bond_id)? } } - self.write_unbond(&bond_id, unbond); - self.write_validator_total_deltas(validator, validator_total_deltas); - self.write_validator_voting_power(validator, validator_voting_power); - self.write_total_voting_power(total_voting_power); - self.write_validator_set(validator_set); + self.write_unbond(&bond_id, unbond)?; + self.write_validator_total_deltas(validator, validator_total_deltas)?; + self.write_validator_voting_power(validator, validator_voting_power)?; + self.write_total_voting_power(total_voting_power)?; + self.write_validator_set(validator_set)?; Ok(()) } @@ -455,17 +500,17 @@ pub trait PosActions: PosReadOnly { source: Option<&Self::Address>, validator: &Self::Address, current_epoch: impl Into, - ) -> Result> { + ) -> Result { let current_epoch = current_epoch.into(); - let params = self.read_pos_params(); + let params = self.read_pos_params()?; let source = source.unwrap_or(validator); let bond_id = BondId { source: source.clone(), validator: validator.clone(), }; - let unbond = self.read_unbond(&bond_id); - let slashes = self.read_validator_slashes(&bond_id.validator); + let unbond = self.read_unbond(&bond_id)?; + let slashes = self.read_validator_slashes(&bond_id.validator)?; let WithdrawData { unbond, @@ -486,11 +531,11 @@ pub trait PosActions: PosReadOnly { ); match total_unbonds { Some(total_unbonds) if total_unbonds.sum() != 0.into() => { - self.write_unbond(&bond_id, unbond); + self.write_unbond(&bond_id, unbond)?; } _ => { // If the unbond is left empty, delete it - self.delete_unbond(&bond_id) + self.delete_unbond(&bond_id)? } } @@ -500,7 +545,7 @@ pub trait PosActions: PosReadOnly { withdrawn, &Self::POS_ADDRESS, source, - ); + )?; Ok(slashed) } @@ -576,7 +621,7 @@ pub trait PosBase { /// Read PoS parameters. fn read_pos_params(&self) -> PosParams; - /// Read PoS raw hash of validator's address. + /// Read PoS raw hash of validator's consensus key. fn read_validator_address_raw_hash( &self, raw_hash: impl AsRef, @@ -622,8 +667,12 @@ pub trait PosBase { /// Write PoS parameters. fn write_pos_params(&mut self, params: &PosParams); - /// Write PoS validator's raw hash its address. - fn write_validator_address_raw_hash(&mut self, address: &Self::Address); + /// Write PoS validator's raw hash of its consensus key. + fn write_validator_address_raw_hash( + &mut self, + address: &Self::Address, + consensus_key: &Self::PublicKey, + ); /// Write PoS validator's staking reward address, into which staking rewards /// will be credited. 
fn write_validator_staking_reward_address( @@ -750,7 +799,12 @@ pub trait PosBase { eth_cold_key, eth_hot_key, } = res?; - self.write_validator_address_raw_hash(address); + self.write_validator_address_raw_hash( + address, + consensus_key + .get(current_epoch) + .expect("Consensus key must be set"), + ); self.write_validator_staking_reward_address( address, &staking_reward_address, @@ -800,12 +854,12 @@ pub trait PosBase { let prev_validators = previous_epoch.and_then(|epoch| validators.get(epoch)); - // If the validator never been active before and it doesn't have more - // than 0 voting power, we should not tell Tendermint to update it until - // it does. Tendermint uses 0 voting power as a way to signal - // that a validator has been removed from the validator set, but - // fails if we attempt to give it a new validator with 0 voting - // power. + // If the validator has never been active before and it doesn't have + // more than 0 voting power, we should not tell Tendermint to + // update it until it does. Tendermint uses 0 voting power as a + // way to signal that a validator has been removed from the + // validator set, but fails if we attempt to give it a new + // validator with 0 voting power. // For active validators, this would only ever happen until all the // validator slots are filled with non-0 voting power validators, but we // still need to guard against it. @@ -992,7 +1046,7 @@ pub enum BondError { InactiveValidator(Address), #[error("Voting power overflow: {0}")] VotingPowerOverflow(TryFromIntError), - #[error("Given zero amount to unbond")] + #[error("Given zero amount to bond")] ZeroAmount, } @@ -1259,10 +1313,15 @@ where source: address.clone(), validator: address.clone(), }; - let mut deltas = HashMap::default(); - deltas.insert(current_epoch, *tokens); - let bond = - EpochedDelta::init_at_genesis(Bond { deltas }, current_epoch); + let mut pos_deltas = HashMap::default(); + pos_deltas.insert(current_epoch, *tokens); + let bond = EpochedDelta::init_at_genesis( + Bond { + pos_deltas, + neg_deltas: Default::default(), + }, + current_epoch, + ); Ok(GenesisValidatorData { address: address.clone(), staking_reward_address: staking_reward_address.clone(), @@ -1581,15 +1640,21 @@ where // Update or create the bond let mut value = Bond { - deltas: HashMap::default(), + pos_deltas: HashMap::default(), + neg_deltas: TokenAmount::default(), }; value - .deltas + .pos_deltas .insert(current_epoch + update_offset.value(params), amount); let bond = match current_bond { - None => EpochedDelta::init(value, current_epoch, params), + None => EpochedDelta::init_at_offset( + value, + current_epoch, + update_offset, + params, + ), Some(mut bond) => { - bond.add(value, current_epoch, params); + bond.add_at_offset(value, current_epoch, update_offset, params); bond } }; @@ -1707,6 +1772,7 @@ where + AddAssign + Into + From + + Sub + SubAssign + BorshDeserialize + BorshSerialize @@ -1717,7 +1783,7 @@ where + Clone + Copy + Add - + Sub + + Sub + From + Neg + Into @@ -1752,27 +1818,25 @@ where let mut slashed_amount = TokenAmount::default(); // Decrement the bond deltas starting from the rightmost value (a bond in a // future-most epoch) until whole amount is decremented - bond.rev_update_while( + bond.rev_while( |bonds, _epoch| { - bonds.deltas.retain(|epoch_start, bond_delta| { + for (epoch_start, bond_delta) in bonds.pos_deltas.iter() { if *to_unbond == 0.into() { return true; } let mut unbonded = HashMap::default(); let unbond_end = current_epoch + update_offset.value(params) - 1; - // We need 
to accumulate the slashed delta for multiple slashes - // applicable to a bond, where each slash should be - // calculated from the delta reduced by the previous slash. - let applied_delta = if to_unbond > bond_delta { + // We need to accumulate the slashed delta for multiple + // slashes applicable to a bond, where + // each slash should be calculated from + // the delta reduced by the previous slash. + let applied_delta = if *to_unbond > *bond_delta { unbonded.insert((*epoch_start, unbond_end), *bond_delta); *to_unbond -= *bond_delta; - let applied_delta = *bond_delta; - *bond_delta = 0.into(); - applied_delta + *bond_delta } else { unbonded.insert((*epoch_start, unbond_end), *to_unbond); - *bond_delta -= *to_unbond; let applied_delta = *to_unbond; *to_unbond = 0.into(); applied_delta @@ -1792,9 +1856,7 @@ where // For each decremented bond value write a new unbond unbond.add(Unbond { deltas: unbonded }, current_epoch, params); - // Remove bonds with no tokens left - *bond_delta != 0.into() - }); + } // Stop the update once all the tokens are unbonded *to_unbond != 0.into() }, @@ -1802,6 +1864,16 @@ where params, ); + bond.add_at_offset( + Bond { + pos_deltas: Default::default(), + neg_deltas: amount, + }, + current_epoch, + update_offset, + params, + ); + // Update validator set. This has to be done before we update the // `validator_total_deltas`, because we need to look-up the validator with // its voting power before the change. diff --git a/proof_of_stake/src/parameters.rs b/proof_of_stake/src/parameters.rs index 84bd59d4a5..7ee0abdf98 100644 --- a/proof_of_stake/src/parameters.rs +++ b/proof_of_stake/src/parameters.rs @@ -78,7 +78,8 @@ const MAX_TOTAL_VOTING_POWER: i64 = i64::MAX / 8; const TOKEN_MAX_AMOUNT: u64 = u64::MAX / 1_000_000; impl PosParams { - /// Validate PoS parameters values. Returns empty list the values are valid. + /// Validate PoS parameters values. Returns an empty list if the values are + /// valid. #[must_use] pub fn validate(&self) -> Vec { let mut errors = vec![]; diff --git a/proof_of_stake/src/types.rs b/proof_of_stake/src/types.rs index 1d9a08e05a..6d9841748a 100644 --- a/proof_of_stake/src/types.rs +++ b/proof_of_stake/src/types.rs @@ -11,7 +11,7 @@ use std::ops::{Add, AddAssign, Mul, Sub, SubAssign}; use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; use crate::epoched::{ - Epoched, EpochedDelta, OffsetPipelineLen, OffsetUnboundingLen, + Epoched, EpochedDelta, OffsetPipelineLen, OffsetUnbondingLen, }; use crate::parameters::PosParams; @@ -22,22 +22,21 @@ pub type ValidatorConsensusKeys = pub type ValidatorStates = Epoched; /// Epoched validator's total deltas. pub type ValidatorTotalDeltas = - EpochedDelta; + EpochedDelta; /// Epoched validator's voting power. pub type ValidatorVotingPowers = - EpochedDelta; + EpochedDelta; /// Epoched bond. pub type Bonds = - EpochedDelta, OffsetPipelineLen>; + EpochedDelta, OffsetUnbondingLen>; /// Epoched unbond. pub type Unbonds = - EpochedDelta, OffsetUnboundingLen>; + EpochedDelta, OffsetUnbondingLen>; /// Epoched validator set. pub type ValidatorSets
= - Epoched, OffsetUnboundingLen>; + Epoched, OffsetUnbondingLen>; /// Epoched total voting power. -pub type TotalVotingPowers = - EpochedDelta; +pub type TotalVotingPowers = EpochedDelta; /// Epoched validator's eth key. pub type ValidatorEthKey = Epoched; @@ -312,20 +311,24 @@ pub enum ValidatorState { // TODO consider adding `Jailed` } -/// A bond is validator's self-bond or a delegation from a regular account to a -/// validator. +/// A bond is either a validator's self-bond or a delegation from a regular +/// account to a validator. #[derive( Debug, Clone, Default, BorshDeserialize, BorshSerialize, BorshSchema, )] pub struct Bond { - /// A key is a the epoch set for the bond. This is used in unbonding, where - /// it's needed for slash epoch range check. + /// Bonded positive deltas. A key is the epoch set for the bond. This is + /// used in unbonding, where it's needed for slash epoch range check. /// /// TODO: For Bonds, there's unnecessary redundancy with this hash map. /// We only need to keep the start `Epoch` for the Epoched head element /// (i.e. the current epoch data), the rest of the array can be calculated /// from the offset from the head - pub deltas: HashMap, + pub pos_deltas: HashMap, + /// Unbonded negative deltas. The values are recorded as positive, but + /// should be subtracted when we're finding the total for some given + /// epoch. + pub neg_deltas: Token, } /// An unbond contains unbonded tokens from a validator's self-bond or a @@ -384,6 +387,12 @@ pub enum SlashType { )] pub struct BasisPoints(u64); +/// Derive Tendermint raw hash from the public key +pub trait PublicKeyTmRawHash { + /// Derive Tendermint raw hash from the public key + fn tm_raw_hash(&self) -> String; +} + impl VotingPower { /// Convert token amount into a voting power. pub fn from_tokens(tokens: impl Into, params: &PosParams) -> Self { @@ -576,13 +585,15 @@ where impl Bond where - Token: Clone + Copy + Add + Default, + Token: Clone + Copy + Add + Sub + Default, { /// Find the sum of all the bonds amounts. pub fn sum(&self) -> Token { - self.deltas + let pos_deltas_sum: Token = self + .pos_deltas .iter() - .fold(Default::default(), |acc, (_epoch, amount)| acc + *amount) + .fold(Default::default(), |acc, (_epoch, amount)| acc + *amount); + pos_deltas_sum - self.neg_deltas } } @@ -593,24 +604,26 @@ where type Output = Self; fn add(mut self, rhs: Self) -> Self::Output { - // This is almost the same as `self.delta.extend(rhs.delta);`, except - // that we add values where a key is present on both sides. - let iter = rhs.deltas.into_iter(); - let reserve = if self.deltas.is_empty() { + // This is almost the same as `self.pos_deltas.extend(rhs.pos_deltas);`, + // except that we add values where a key is present on both + // sides. 
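// --- Illustrative sketch, not part of the patch ---
// The reworked `Bond` above keeps unbonded tokens as a separate `neg_deltas`
// total instead of mutating the original `pos_deltas` entries, and `sum()`
// subtracts it from the summed positive deltas. This self-contained toy
// version (plain `u64` epochs and amounts rather than the real generic
// `Epoch`/`Token` types) shows that bookkeeping.
use std::collections::HashMap;

#[derive(Default)]
struct ToyBond {
    pos_deltas: HashMap<u64, u64>,
    neg_deltas: u64,
}

impl ToyBond {
    fn sum(&self) -> u64 {
        let pos: u64 = self.pos_deltas.values().sum();
        pos - self.neg_deltas
    }
}

fn main() {
    let mut bond = ToyBond::default();
    bond.pos_deltas.insert(1, 100); // bonded at epoch 1
    bond.pos_deltas.insert(3, 50);  // bonded at epoch 3
    bond.neg_deltas = 30;           // 30 tokens unbonded later
    assert_eq!(bond.sum(), 120);    // 150 bonded minus 30 unbonded
}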
+ let iter = rhs.pos_deltas.into_iter(); + let reserve = if self.pos_deltas.is_empty() { iter.size_hint().0 } else { (iter.size_hint().0 + 1) / 2 }; - self.deltas.reserve(reserve); + self.pos_deltas.reserve(reserve); iter.for_each(|(k, v)| { // Add or insert - match self.deltas.get_mut(&k) { + match self.pos_deltas.get_mut(&k) { Some(value) => *value += v, None => { - self.deltas.insert(k, v); + self.pos_deltas.insert(k, v); } } }); + self.neg_deltas += rhs.neg_deltas; self } } diff --git a/proof_of_stake/src/validation.rs b/proof_of_stake/src/validation.rs index 41485bcbb0..faef13f457 100644 --- a/proof_of_stake/src/validation.rs +++ b/proof_of_stake/src/validation.rs @@ -5,24 +5,26 @@ use std::collections::HashMap; use std::convert::TryFrom; use std::fmt::{Debug, Display}; use std::hash::Hash; +use std::marker::PhantomData; use std::ops::{Add, AddAssign, Neg, Sub, SubAssign}; use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; +use derivative::Derivative; use thiserror::Error; use crate::btree_set::BTreeSetShims; use crate::epoched::DynEpochOffset; use crate::parameters::PosParams; use crate::types::{ - BondId, Bonds, Epoch, Slashes, TotalVotingPowers, Unbonds, - ValidatorConsensusKeys, ValidatorSets, ValidatorState, ValidatorStates, - ValidatorTotalDeltas, ValidatorVotingPowers, VotingPower, VotingPowerDelta, - WeightedValidator, + BondId, Bonds, Epoch, PublicKeyTmRawHash, Slash, Slashes, + TotalVotingPowers, Unbonds, ValidatorConsensusKeys, ValidatorSets, + ValidatorState, ValidatorStates, ValidatorTotalDeltas, + ValidatorVotingPowers, VotingPower, VotingPowerDelta, WeightedValidator, }; #[allow(missing_docs)] #[derive(Error, Debug)] -pub enum Error +pub enum Error where Address: Display + Debug @@ -36,6 +38,7 @@ where + BorshSchema + BorshDeserialize, TokenChange: Debug + Display, + PublicKey: Debug, { #[error("Unexpectedly missing state value for validator {0}")] ValidatorStateIsRequired(Address), @@ -96,6 +99,17 @@ where got: u64, expected: u64, }, + + #[error( + "Bond ID {id} must be subtracted at the correct epoch. Got epoch \ + {got}, expected {expected}" + )] + InvalidNegDeltaEpoch { + id: BondId
, + got: u64, + expected: u64, + }, + #[error( "Invalid validator {address} sum of total deltas. Total Δ \ {total_delta}, bonds Δ {bond_delta}" @@ -158,7 +172,7 @@ where #[error("Invalid address raw hash update")] InvalidRawHashUpdate, #[error("Invalid new validator {0}, some fields are missing: {1:?}.")] - InvalidNewValidator(Address, NewValidator), + InvalidNewValidator(Address, NewValidator), #[error("New validator {0} has not been added to the validator set.")] NewValidatorMissingInValidatorSet(Address), #[error("Validator set has not been updated for new validators.")] @@ -241,8 +255,8 @@ where ValidatorAddressRawHash { /// Raw hash value raw_hash: String, - /// The address and raw hash derived from it - data: Data<(Address, String)>, + /// The validator's address + data: Data
, }, } @@ -291,17 +305,29 @@ where /// A new validator account initialized in a transaction, which is used to check /// that all the validator's required fields have been written. -#[derive(Clone, Debug, Default)] -pub struct NewValidator { +#[derive(Clone, Debug, Derivative)] +// https://mcarton.github.io/rust-derivative/latest/Default.html#custom-bound +#[derivative(Default(bound = ""))] +pub struct NewValidator { has_state: bool, - has_consensus_key: bool, + has_consensus_key: Option, has_total_deltas: bool, has_voting_power: bool, has_staking_reward_address: bool, - has_address_raw_hash: bool, + has_address_raw_hash: Option, voting_power: VotingPower, } +/// Validation constants +#[derive(Clone, Debug)] +struct Constants { + current_epoch: Epoch, + pipeline_epoch: Epoch, + unbonding_epoch: Epoch, + pipeline_offset: u64, + unbonding_offset: u64, +} + /// Validate the given list of PoS data `changes`. Returns empty list, if all /// the changes are valid. #[must_use] @@ -309,7 +335,7 @@ pub fn validate( params: &PosParams, changes: Vec>, current_epoch: impl Into, -) -> Vec> +) -> Vec> where Address: Display + Debug @@ -362,1026 +388,171 @@ where + BorshDeserialize + BorshSerialize + BorshSchema - + PartialEq, + + PartialEq + + PublicKeyTmRawHash, { - let current_epoch = current_epoch.into(); - use DataUpdate::*; - use ValidatorUpdate::*; - + let current_epoch: Epoch = current_epoch.into(); let pipeline_offset = DynEpochOffset::PipelineLen.value(params); let unbonding_offset = DynEpochOffset::UnbondingLen.value(params); let pipeline_epoch = current_epoch + pipeline_offset; let unbonding_epoch = current_epoch + unbonding_offset; + let constants = Constants { + current_epoch, + pipeline_epoch, + unbonding_epoch, + pipeline_offset, + unbonding_offset, + }; let mut errors = vec![]; - let mut balance_delta = TokenChange::default(); - // Changes of validators' bonds - let mut bond_delta: HashMap = HashMap::default(); - // Changes of validators' unbonds - let mut unbond_delta: HashMap = HashMap::default(); - - // Changes of all validator total deltas (up to `unbonding_epoch`) - let mut total_deltas: HashMap = HashMap::default(); - // Accumulative stake calculated from validator total deltas for each epoch - // in which it has changed (the tuple of values are in pre and post state) - let mut total_stake_by_epoch: HashMap< - Epoch, - HashMap, - > = HashMap::default(); - // Total voting power delta calculated from validators' total deltas - let mut expected_total_voting_power_delta_by_epoch: HashMap< - Epoch, - VotingPowerDelta, - > = HashMap::default(); - // Changes of validators' voting power data - let mut voting_power_by_epoch: HashMap< - Epoch, - HashMap, - > = HashMap::default(); - - let mut validator_set_pre: Option> = None; - let mut validator_set_post: Option> = None; - - let mut total_voting_power_delta_by_epoch: HashMap< - Epoch, - VotingPowerDelta, - > = HashMap::default(); - - let mut new_validators: HashMap = HashMap::default(); - - for change in changes { - match change { - Validator { address, update } => match update { - State(data) => match (data.pre, data.post) { - (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // Before pipeline epoch, the state must be `Pending` - for epoch in - Epoch::iter_range(current_epoch, pipeline_offset) - { - match post.get(epoch) { - Some(ValidatorState::Pending) => {} - _ => errors.push( - Error::InvalidNewValidatorState( - epoch.into(), - ), - ), - } - } - // At pipeline epoch, the 
state must be `Candidate` - match post.get(pipeline_epoch) { - Some(ValidatorState::Candidate) => {} - _ => errors.push(Error::InvalidNewValidatorState( - pipeline_epoch.into(), - )), - } - let validator = - new_validators.entry(address.clone()).or_default(); - validator.has_state = true; - } - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - use ValidatorState::*; - // Before pipeline epoch, the only allowed state change - // is from `Inactive` to `Pending` - for epoch in - Epoch::iter_range(current_epoch, pipeline_offset) - { - match (pre.get(epoch), post.get(epoch)) { - (Some(Inactive), Some(Pending)) => {} - (Some(state_pre), Some(state_post)) - if state_pre == state_post => {} - _ => errors.push( - Error::InvalidValidatorStateUpdate( - epoch.into(), - ), - ), - } - } - // Check allowed state changes at pipeline epoch - match ( - pre.get(pipeline_epoch), - post.get(pipeline_epoch), - ) { - ( - Some(Pending), - Some(Candidate) | Some(Inactive), - ) - | (Some(Candidate), Some(Inactive)) - | ( - Some(Inactive), - Some(Candidate) | Some(Pending), - ) => {} - _ => errors.push(Error::InvalidNewValidatorState( - pipeline_epoch.into(), - )), - } - } - (Some(_), None) => errors - .push(Error::ValidatorStateIsRequired(address.clone())), - (None, None) => continue, - }, - ConsensusKey(data) => match (data.pre, data.post) { - (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // The value must be known at pipeline epoch - match post.get(pipeline_epoch) { - Some(_) => {} - _ => errors.push( - Error::MissingNewValidatorConsensusKey( - pipeline_epoch.into(), - ), - ), - } - let validator = - new_validators.entry(address.clone()).or_default(); - validator.has_consensus_key = true; - } - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // Before pipeline epoch, the key must not change - for epoch in - Epoch::iter_range(current_epoch, pipeline_offset) - { - match (pre.get(epoch), post.get(epoch)) { - (Some(key_pre), Some(key_post)) - if key_pre == key_post => - { - continue; - } - _ => errors.push( - Error::InvalidValidatorConsensusKeyUpdate( - epoch.into(), - ), - ), - } - } - } - (Some(_), None) => errors - .push(Error::ValidatorStateIsRequired(address.clone())), - (None, None) => continue, - }, - StakingRewardAddress(data) => match (data.pre, data.post) { - (Some(_), Some(post)) => { - if post == address { - errors.push( - Error::StakingRewardAddressEqValidator( - address.clone(), - ), - ); - } - } - (None, Some(post)) => { - if post == address { - errors.push( - Error::StakingRewardAddressEqValidator( - address.clone(), - ), - ); - } - let validator = - new_validators.entry(address.clone()).or_default(); - validator.has_staking_reward_address = true; - } - _ => errors.push(Error::StakingRewardAddressIsRequired( - address.clone(), - )), - }, - TotalDeltas(data) => match (data.pre, data.post) { - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // Changes of all total deltas (up to `unbonding_epoch`) - let mut deltas = TokenChange::default(); - // Sum of pre total deltas - let mut pre_deltas_sum = TokenChange::default(); - // Sum of post total deltas - let mut post_deltas_sum = TokenChange::default(); - // Iter from the first epoch to the last epoch of `post` - for epoch in Epoch::iter_range( - current_epoch, - unbonding_offset + 1, - ) 
{ - // Changes of all total deltas (up to - // `unbonding_epoch`) - let mut delta = TokenChange::default(); - // Find the delta in `pre` - if let Some(change) = { - if epoch == current_epoch { - // On the first epoch, we have to get the - // sum of all deltas at and before that - // epoch as the `pre` could have been set in - // an older epoch - pre.get(epoch) - } else { - pre.get_delta_at_epoch(epoch).copied() - } - } { - delta -= change; - pre_deltas_sum += change; - } - // Find the delta in `post` - if let Some(change) = post.get_delta_at_epoch(epoch) - { - delta += *change; - post_deltas_sum += *change; - let stake_pre: i128 = - Into::into(pre_deltas_sum); - let stake_post: i128 = - Into::into(post_deltas_sum); - match ( - u64::try_from(stake_pre), - u64::try_from(stake_post), - ) { - (Ok(stake_pre), Ok(stake_post)) => { - let stake_pre = - TokenAmount::from(stake_pre); - let stake_post = - TokenAmount::from(stake_post); - total_stake_by_epoch - .entry(epoch) - .or_insert_with(HashMap::default) - .insert( - address.clone(), - (stake_pre, stake_post), - ); - } - _ => errors.push( - Error::InvalidValidatorTotalDeltas( - address.clone(), - stake_post, - ), - ), - } - } - deltas += delta; - // A total delta can only be increased at - // `pipeline_offset` from bonds and decreased at - // `unbonding_offset` from unbonding - if delta > TokenChange::default() - && epoch != pipeline_epoch - { - errors.push(Error::EpochedDataWrongEpoch { - got: epoch.into(), - expected: vec![pipeline_epoch.into()], - }) - } - if delta < TokenChange::default() - && epoch != unbonding_epoch - { - errors.push(Error::EpochedDataWrongEpoch { - got: epoch.into(), - expected: vec![unbonding_epoch.into()], - }) - } - } - if post_deltas_sum < TokenChange::default() { - errors.push(Error::NegativeValidatorTotalDeltasSum( - address.clone(), - )) - } - if deltas != TokenChange::default() { - let deltas_entry = total_deltas - .entry(address.clone()) - .or_default(); - *deltas_entry += deltas; - } - } - (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // Changes of all total deltas (up to `unbonding_epoch`) - let mut deltas = TokenChange::default(); - for epoch in Epoch::iter_range( - current_epoch, - unbonding_offset + 1, - ) { - if let Some(change) = post.get_delta_at_epoch(epoch) - { - // A new total delta can only be initialized - // at `pipeline_offset` (from bonds) and updated - // at `unbonding_offset` (from unbonding) - if epoch != pipeline_epoch - && epoch != unbonding_epoch - { - errors.push(Error::EpochedDataWrongEpoch { - got: epoch.into(), - expected: vec![pipeline_epoch.into()], - }) - } - deltas += *change; - let stake: i128 = Into::into(deltas); - match u64::try_from(stake) { - Ok(stake) => { - let stake = TokenAmount::from(stake); - total_stake_by_epoch - .entry(epoch) - .or_insert_with(HashMap::default) - .insert( - address.clone(), - (0.into(), stake), - ); - } - Err(_) => errors.push( - Error::InvalidValidatorTotalDeltas( - address.clone(), - stake, - ), - ), - } - } - } - if deltas < TokenChange::default() { - errors.push(Error::NegativeValidatorTotalDeltasSum( - address.clone(), - )) - } - if deltas != TokenChange::default() { - let deltas_entry = total_deltas - .entry(address.clone()) - .or_default(); - *deltas_entry += deltas; - } - let validator = - new_validators.entry(address.clone()).or_default(); - validator.has_total_deltas = true; - } - (Some(_), None) => { - errors.push(Error::MissingValidatorTotalDeltas(address)) - } - (None, 
None) => continue, - }, - VotingPowerUpdate(data) => match (&data.pre, data.post) { - (Some(_), Some(post)) | (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - let mut voting_power = VotingPowerDelta::default(); - // Iter from the current epoch to the last epoch of - // `post` - for epoch in Epoch::iter_range( - current_epoch, - unbonding_offset + 1, - ) { - if let Some(delta_post) = - post.get_delta_at_epoch(epoch) - { - voting_power += *delta_post; + let Accumulator { + balance_delta, + bond_delta, + unbond_delta, + total_deltas, + total_stake_by_epoch, + expected_total_voting_power_delta_by_epoch, + voting_power_by_epoch, + validator_set_pre, + validator_set_post, + total_voting_power_delta_by_epoch, + new_validators, + } = Validate::::accumulate_changes( + changes, params, &constants, &mut errors + ); - // If the delta is not the same as in pre-state, - // accumulate the expected total voting power - // change - let delta_pre = data - .pre - .as_ref() - .and_then(|data| { - if epoch == current_epoch { - // On the first epoch, we have to - // get the sum of all deltas at and - // before that epoch as the `pre` - // could have been set in an older - // epoch - data.get(epoch) - } else { - data.get_delta_at_epoch(epoch) - .copied() - } - }) - .unwrap_or_default(); - if delta_pre != *delta_post { - let current_delta = - expected_total_voting_power_delta_by_epoch - .entry(epoch) - .or_insert_with(Default::default); - *current_delta += *delta_post - delta_pre; - } + // Check total deltas against bonds + for (validator, total_delta) in total_deltas.iter() { + let bond_delta = bond_delta.get(validator).copied().unwrap_or_default(); + let total_delta = *total_delta; + if total_delta != bond_delta { + errors.push(Error::InvalidValidatorTotalDeltasSum { + address: validator.clone(), + total_delta, + bond_delta, + }) + } + } + // Check that all bonds also have a total deltas update + for validator in bond_delta.keys() { + if !total_deltas.contains_key(validator) { + errors.push(Error::MissingValidatorTotalDeltas(validator.clone())) + } + } + // Check that all positive unbond deltas also have a total deltas update. + // Negative unbond delta is from withdrawing, which removes tokens from + // unbond, but doesn't affect total deltas. 
+ for (validator, delta) in &unbond_delta { + if *delta > TokenChange::default() + && !total_deltas.contains_key(validator) + { + errors.push(Error::MissingValidatorTotalDeltas(validator.clone())); + } + } - let vp: i64 = Into::into(voting_power); - match u64::try_from(vp) { - Ok(vp) => { - let vp = VotingPower::from(vp); - voting_power_by_epoch - .entry(epoch) - .or_insert_with(HashMap::default) - .insert(address.clone(), vp); - } - Err(_) => errors.push( - Error::InvalidValidatorVotingPower( - address.clone(), - vp, - ), - ), - } - } - } - if data.pre.is_none() { - let validator = new_validators - .entry(address.clone()) - .or_default(); - validator.has_voting_power = true; - validator.voting_power = post - .get_at_offset( - current_epoch, - DynEpochOffset::PipelineLen, - params, - ) - .unwrap_or_default() - .try_into() - .unwrap_or_default() - } - } - (Some(_), None) => errors.push( - Error::MissingValidatorVotingPower(address.clone()), - ), - (None, None) => continue, - }, - }, - Balance(data) => match (data.pre, data.post) { - (None, Some(post)) => balance_delta += TokenChange::from(post), - (Some(pre), Some(post)) => { - balance_delta -= TokenChange::from(pre); - balance_delta += TokenChange::from(post); + // Check validator sets against validator total stakes. + // Iter from the first epoch to the last epoch of `validator_set_post` + if let Some(post) = &validator_set_post { + for epoch in Epoch::iter_range(current_epoch, unbonding_offset + 1) { + if let Some(post) = post.get_at_epoch(epoch) { + // Check that active validators length is not over the limit + if post.active.len() > params.max_validator_slots as usize { + errors.push(Error::TooManyActiveValidators) } - (Some(_), None) => errors.push(Error::MissingBalance), - (None, None) => continue, - }, - Bond { id, data, slashes } => match (data.pre, data.post) { - // Bond may be updated from newly bonded tokens and unbonding - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) + // Check that all active have voting power >= any inactive + if let ( + Some(max_inactive_validator), + Some(min_active_validator), + ) = (post.inactive.last_shim(), post.active.first_shim()) + { + if max_inactive_validator.voting_power + > min_active_validator.voting_power + { + errors.push(Error::ValidatorSetOutOfOrder( + max_inactive_validator.clone(), + min_active_validator.clone(), + )); } - let pre_offset: u64 = - match current_epoch.checked_sub(pre.last_update()) { - Some(offset) => offset.into(), - None => { - // If the last_update > current_epoch, the check - // above must have failed with - // `Error::InvalidLastUpdate` - continue; - } - }; - - // Pre-bonds keyed by their `start_epoch` - let mut pre_bonds: HashMap = - HashMap::default(); - // We have to slash only the difference between post and - // pre, not both pre and post to avoid rounding errors - let mut slashed_deltas: HashMap = - HashMap::default(); + } - // Iter from the first epoch of `pre` to the last epoch of - // `post` - for epoch in Epoch::iter_range( - pre.last_update(), - pre_offset + pipeline_offset + 1, - ) { - if let Some(bond) = pre.get_delta_at_epoch(epoch) { - for (start_epoch, delta) in bond.deltas.iter() { - let delta = TokenChange::from(*delta); - slashed_deltas.insert(*start_epoch, -delta); - pre_bonds.insert(*start_epoch, delta); - } - } - if let Some(bond) = post.get_delta_at_epoch(epoch) { - for (start_epoch, delta) in bond.deltas.iter() { - // An empty bond must be deleted - if *delta == 
TokenAmount::default() { - errors.push(Error::EmptyBond(id.clone())) - } - // On the current epoch, all bond's - // `start_epoch`s must be equal or lower than - // `current_epoch`. For all others, the - // `start_epoch` must be equal - // to the `epoch` at which it's set. - if (epoch == current_epoch - && *start_epoch > current_epoch) - || (epoch != current_epoch - && *start_epoch != epoch) - { - errors.push(Error::InvalidBondStartEpoch { - id: id.clone(), - got: (*start_epoch).into(), - expected: epoch.into(), - }) - } - let delta = TokenChange::from(*delta); - match slashed_deltas.get_mut(start_epoch) { - Some(pre_delta) => { - if *pre_delta + delta == 0_i128.into() { - slashed_deltas.remove(start_epoch); - } else { - *pre_delta += delta; - } - } - None => { - slashed_deltas - .insert(*start_epoch, delta); + match validator_set_pre.as_ref().and_then(|pre| pre.get(epoch)) + { + Some(pre) => { + let total_stakes = total_stake_by_epoch + .get(&epoch) + .map(Cow::Borrowed) + .unwrap_or_else(|| Cow::Owned(HashMap::default())); + // Check active validators + for validator in &post.active { + match total_stakes.get(&validator.address) { + Some((_stake_pre, stake_post)) => { + let voting_power = VotingPower::from_tokens( + *stake_post, + params, + ); + // Any validator who's total deltas changed, + // should + // be up-to-date + if validator.voting_power != voting_power { + errors.push( + Error::InvalidActiveValidator( + validator.clone(), + ), + ) } } + None => { + // Others must be have the same voting power + // as in pre (active or inactive), or be a + // newly added validator + if !pre.active.contains(validator) + && !pre.inactive.contains(validator) + && !new_validators + .contains_key(&validator.address) + { + let mut is_valid = false; - // Anywhere other than at `pipeline_offset` - // where new bonds are added, check against the - // data in `pre_bonds` to ensure that no new - // bond has been added and that the deltas are - // equal or lower to `pre_bonds` deltas. - // Note that any bonds from any epoch can be - // unbonded, even if they are not yet active. - if epoch != pipeline_epoch { - match pre_bonds.get(start_epoch) { - Some(pre_delta) => { - if &delta > pre_delta { - errors.push( - Error::InvalidNewBondEpoch { - id: id.clone(), - got: epoch.into(), - expected: pipeline_epoch - .into(), - }); + // It's also possible that for this + // validator there has been no change in + // this epoch, but in an earlier epoch. + // We attempt to search for it below and + // if the voting power matches the + // stake, this is valid. 
+ let mut search_epoch = + u64::from(epoch) - 1; + while search_epoch + >= current_epoch.into() + { + if let Some(( + _take_pre, + last_total_stake, + )) = total_stake_by_epoch + .get(&search_epoch.into()) + .and_then(|stakes| { + stakes + .get(&validator.address) + }) + { + let voting_power = + VotingPower::from_tokens( + *last_total_stake, + params, + ); + is_valid = validator + .voting_power + == voting_power; + break; + } else { + search_epoch -= 1; } } - None => { + if !is_valid { errors.push( - Error::InvalidNewBondEpoch { - id: id.clone(), - got: epoch.into(), - expected: (current_epoch - + pipeline_offset) - .into(), - }, - ); - } - } - } - } - } - } - // Check slashes - for (start_epoch, delta) in slashed_deltas.iter_mut() { - for slash in &slashes { - if slash.epoch >= *start_epoch { - let raw_delta: i128 = (*delta).into(); - let current_slashed = - TokenChange::from(slash.rate * raw_delta); - *delta -= current_slashed; - } - } - } - let total = slashed_deltas - .values() - .fold(TokenChange::default(), |acc, delta| { - acc + *delta - }); - if total != TokenChange::default() { - let bond_entry = - bond_delta.entry(id.validator).or_default(); - *bond_entry += total; - } - } - // Bond may be created from newly bonded tokens only - (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - let mut total_delta = TokenChange::default(); - for epoch in - Epoch::iter_range(current_epoch, pipeline_offset + 1) - { - if let Some(bond) = post.get_delta_at_epoch(epoch) { - // A new bond must be initialized at - // `pipeline_offset` - if epoch != pipeline_epoch { - errors.push(Error::EpochedDataWrongEpoch { - got: epoch.into(), - expected: vec![pipeline_epoch.into()], - }) - } - for (start_epoch, delta) in bond.deltas.iter() { - if *start_epoch != epoch { - errors.push(Error::InvalidBondStartEpoch { - id: id.clone(), - got: (*start_epoch).into(), - expected: epoch.into(), - }) - } - let mut delta = *delta; - // Check slashes - for slash in &slashes { - if slash.epoch >= *start_epoch { - let raw_delta: u64 = delta.into(); - let current_slashed = TokenAmount::from( - slash.rate * raw_delta, - ); - delta -= current_slashed; - } - } - let delta = TokenChange::from(delta); - total_delta += delta - } - } - } - // An empty bond must be deleted - if total_delta == TokenChange::default() { - errors.push(Error::EmptyBond(id.clone())) - } - let bond_entry = - bond_delta.entry(id.validator).or_default(); - *bond_entry += total_delta; - } - // Bond may be deleted when all the tokens are unbonded - (Some(pre), None) => { - let mut total_delta = TokenChange::default(); - for index in 0..pipeline_offset + 1 { - let index = index as usize; - let epoch = pre.last_update() + index; - if let Some(bond) = pre.get_delta_at_epoch(epoch) { - for (start_epoch, delta) in &bond.deltas { - let mut delta = *delta; - // Check slashes - for slash in &slashes { - if slash.epoch >= *start_epoch { - let raw_delta: u64 = delta.into(); - let current_slashed = TokenAmount::from( - slash.rate * raw_delta, - ); - delta -= current_slashed; - } - } - let delta = TokenChange::from(delta); - total_delta -= delta - } - } - } - let bond_entry = - bond_delta.entry(id.validator).or_default(); - *bond_entry += total_delta; - } - (None, None) => continue, - }, - Unbond { id, data, slashes } => match (data.pre, data.post) { - // Unbond may be updated from newly unbonded tokens - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) 
- } - let pre_offset: u64 = - match current_epoch.checked_sub(pre.last_update()) { - Some(offset) => offset.into(), - None => { - // If the last_update > current_epoch, the check - // above must have failed with - // `Error::InvalidLastUpdate` - continue; - } - }; - - // We have to slash only the difference between post and - // pre, not both pre and post to avoid rounding errors - let mut slashed_deltas: HashMap< - (Epoch, Epoch), - TokenChange, - > = HashMap::default(); - // Iter from the first epoch of `pre` to the last epoch of - // `post` - for epoch in Epoch::iter_range( - pre.last_update(), - pre_offset + unbonding_offset + 1, - ) { - if let Some(unbond) = pre.get_delta_at_epoch(epoch) { - for ((start_epoch, end_epoch), delta) in - unbond.deltas.iter() - { - let delta = TokenChange::from(*delta); - slashed_deltas - .insert((*start_epoch, *end_epoch), -delta); - } - } - if let Some(unbond) = post.get_delta_at_epoch(epoch) { - for ((start_epoch, end_epoch), delta) in - unbond.deltas.iter() - { - let delta = TokenChange::from(*delta); - let key = (*start_epoch, *end_epoch); - match slashed_deltas.get_mut(&key) { - Some(pre_delta) => { - if *pre_delta + delta == 0_i128.into() { - slashed_deltas.remove(&key); - } else { - *pre_delta += delta; - } - } - None => { - slashed_deltas.insert(key, delta); - } - } - } - } - } - // Check slashes - for ((start_epoch, end_epoch), delta) in - slashed_deltas.iter_mut() - { - for slash in &slashes { - if slash.epoch >= *start_epoch - && slash.epoch <= *end_epoch - { - let raw_delta: i128 = (*delta).into(); - let current_slashed = - TokenChange::from(slash.rate * raw_delta); - *delta -= current_slashed; - } - } - } - let total = slashed_deltas - .values() - .fold(TokenChange::default(), |acc, delta| { - acc + *delta - }); - if total != TokenChange::default() { - let unbond_entry = - unbond_delta.entry(id.validator).or_default(); - *unbond_entry += total; - } - } - // Unbond may be created from a bond - (None, Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - let mut total_delta = TokenChange::default(); - for epoch in Epoch::iter_range( - post.last_update(), - unbonding_offset + 1, - ) { - if let Some(unbond) = post.get_delta_at_epoch(epoch) { - for ((start_epoch, end_epoch), delta) in - unbond.deltas.iter() - { - let mut delta = *delta; - // Check and apply slashes, if any - for slash in &slashes { - if slash.epoch >= *start_epoch - && slash.epoch <= *end_epoch - { - let raw_delta: u64 = delta.into(); - let current_slashed = TokenAmount::from( - slash.rate * raw_delta, - ); - delta -= current_slashed; - } - } - let delta = TokenChange::from(delta); - total_delta += delta; - } - } - } - let unbond_entry = - unbond_delta.entry(id.validator).or_default(); - *unbond_entry += total_delta; - } - // Unbond may be deleted when all the tokens are withdrawn - (Some(pre), None) => { - let mut total_delta = TokenChange::default(); - for epoch in Epoch::iter_range( - pre.last_update(), - unbonding_offset + 1, - ) { - if let Some(unbond) = pre.get_delta_at_epoch(epoch) { - for ((start_epoch, end_epoch), delta) in - unbond.deltas.iter() - { - let mut delta = *delta; - // Check and apply slashes, if any - for slash in &slashes { - if slash.epoch >= *start_epoch - && slash.epoch <= *end_epoch - { - let raw_delta: u64 = delta.into(); - let current_slashed = TokenAmount::from( - slash.rate * raw_delta, - ); - delta -= current_slashed; - } - } - let delta = TokenChange::from(delta); - total_delta -= delta; - } - } - 
} - let unbond_entry = - unbond_delta.entry(id.validator).or_default(); - *unbond_entry += total_delta; - } - (None, None) => continue, - }, - ValidatorSet(data) => match (data.pre, data.post) { - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - validator_set_pre = Some(pre); - validator_set_post = Some(post); - } - _ => errors.push(Error::MissingValidatorSet), - }, - TotalVotingPower(data) => match (data.pre, data.post) { - (Some(pre), Some(post)) => { - if post.last_update() != current_epoch { - errors.push(Error::InvalidLastUpdate) - } - // Iter from the first epoch to the last epoch of `post` - for epoch in Epoch::iter_range( - post.last_update(), - unbonding_offset + 1, - ) { - // Find the delta in `pre` - let delta_pre = (if epoch == post.last_update() { - // On the first epoch, we have to get the - // sum of all deltas at and before that - // epoch as the `pre` could have been set in - // an older epoch - pre.get(epoch) - } else { - pre.get_delta_at_epoch(epoch).copied() - }) - .unwrap_or_default(); - // Find the delta in `post` - let delta_post = post - .get_delta_at_epoch(epoch) - .copied() - .unwrap_or_default(); - if delta_pre != delta_post { - total_voting_power_delta_by_epoch - .insert(epoch, delta_post - delta_pre); - } - } - } - _ => errors.push(Error::MissingTotalVotingPower), - }, - ValidatorAddressRawHash { raw_hash, data } => { - match (data.pre, data.post) { - (None, Some((address, expected_raw_hash))) => { - if raw_hash != expected_raw_hash { - errors.push(Error::InvalidAddressRawHash( - raw_hash, - expected_raw_hash, - )) - } - let validator = - new_validators.entry(address.clone()).or_default(); - validator.has_address_raw_hash = true; - } - (pre, post) if pre != post => { - errors.push(Error::InvalidRawHashUpdate) - } - _ => continue, - } - } - } - } - - // Check total deltas against bonds - for (validator, total_delta) in total_deltas.iter() { - let bond_delta = bond_delta.get(validator).copied().unwrap_or_default(); - let total_delta = *total_delta; - if total_delta != bond_delta { - errors.push(Error::InvalidValidatorTotalDeltasSum { - address: validator.clone(), - total_delta, - bond_delta, - }) - } - } - // Check that all bonds also have a total deltas update - for validator in bond_delta.keys() { - if !total_deltas.contains_key(validator) { - errors.push(Error::MissingValidatorTotalDeltas(validator.clone())) - } - } - // Check that all positive unbond deltas also have a total deltas update. - // Negative unbond delta is from withdrawing, which removes tokens from - // unbond, but doesn't affect total deltas. - for (validator, delta) in &unbond_delta { - if *delta > TokenChange::default() - && !total_deltas.contains_key(validator) - { - errors.push(Error::MissingValidatorTotalDeltas(validator.clone())); - } - } - - // Check validator sets against validator total stakes. 
- // Iter from the first epoch to the last epoch of `validator_set_post` - if let Some(post) = &validator_set_post { - for epoch in Epoch::iter_range(current_epoch, unbonding_offset + 1) { - if let Some(post) = post.get_at_epoch(epoch) { - // Check that active validators length is not over the limit - if post.active.len() > params.max_validator_slots as usize { - errors.push(Error::TooManyActiveValidators) - } - // Check that all active have voting power >= any inactive - if let ( - Some(max_inactive_validator), - Some(min_active_validator), - ) = (post.inactive.last_shim(), post.active.first_shim()) - { - if max_inactive_validator.voting_power - > min_active_validator.voting_power - { - errors.push(Error::ValidatorSetOutOfOrder( - max_inactive_validator.clone(), - min_active_validator.clone(), - )); - } - } - - match validator_set_pre.as_ref().and_then(|pre| pre.get(epoch)) - { - Some(pre) => { - let total_stakes = total_stake_by_epoch - .get(&epoch) - .map(Cow::Borrowed) - .unwrap_or_else(|| Cow::Owned(HashMap::default())); - // Check active validators - for validator in &post.active { - match total_stakes.get(&validator.address) { - Some((_stake_pre, stake_post)) => { - let voting_power = VotingPower::from_tokens( - *stake_post, - params, - ); - // Any validator who's total deltas changed, - // should - // be up-to-date - if validator.voting_power != voting_power { - errors.push( - Error::InvalidActiveValidator( - validator.clone(), - ), - ) - } - } - None => { - // Others must be have the same voting power - // as in pre (active or inactive), or be a - // newly added validator - if !pre.active.contains(validator) - && !pre.inactive.contains(validator) - && !new_validators - .contains_key(&validator.address) - { - let mut is_valid = false; - - // It's also possible that for this - // validator there has been no change in - // this epoch, but in an earlier epoch. - // We attempt to search for it below and - // if the voting power matches the - // stake, this is valid. 
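// [editorial note, not part of the patch] Concretely: if this validator's
// stake last changed at some earlier epoch, the search below walks backwards
// from `epoch - 1` (stopping at `current_epoch`) until it finds an entry for
// the validator in `total_stake_by_epoch`, and accepts the validator iff
// `VotingPower::from_tokens(that_stake, params)` equals the voting power
// recorded in the post-state validator set. For illustration only: assuming a
// ratio of 1 voting power per 1_000 tokens, a stake of 5_000_000 recorded two
// epochs earlier is consistent with a recorded voting power of 5_000.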
- let mut search_epoch = - u64::from(epoch) - 1; - while search_epoch - >= current_epoch.into() - { - if let Some(( - _take_pre, - last_total_stake, - )) = total_stake_by_epoch - .get(&search_epoch.into()) - .and_then(|stakes| { - stakes - .get(&validator.address) - }) - { - let voting_power = - VotingPower::from_tokens( - *last_total_stake, - params, - ); - is_valid = validator - .voting_power - == voting_power; - break; - } else { - search_epoch -= 1; - } - } - if !is_valid { - errors.push( - Error::InvalidActiveValidator( - validator.clone(), - ), - ) + Error::InvalidActiveValidator( + validator.clone(), + ), + ) } } } @@ -1641,17 +812,30 @@ where } = &new_validator; // The new validator must have set all the required fields if !(*has_state - && *has_consensus_key && *has_total_deltas && *has_voting_power - && *has_staking_reward_address - && *has_address_raw_hash) + && *has_staking_reward_address) { errors.push(Error::InvalidNewValidator( address.clone(), new_validator.clone(), )) } + match (has_address_raw_hash, has_consensus_key) { + (Some(raw_hash), Some(consensus_key)) => { + let expected_raw_hash = consensus_key.tm_raw_hash(); + if raw_hash != &expected_raw_hash { + errors.push(Error::InvalidAddressRawHash( + raw_hash.clone(), + expected_raw_hash, + )) + } + } + _ => errors.push(Error::InvalidNewValidator( + address.clone(), + new_validator.clone(), + )), + } let weighted_validator = WeightedValidator { voting_power: *voting_power, address: address.clone(), @@ -1695,3 +879,1291 @@ where errors } + +#[derive(Clone, Debug)] +struct Accumulator +where + Address: Display + + Debug + + Clone + + PartialEq + + Eq + + PartialOrd + + Ord + + Hash + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenAmount: Display + + Clone + + Copy + + Debug + + Default + + Eq + + Add + + Sub + + AddAssign + + SubAssign + + Into + + From + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenChange: Display + + Debug + + Default + + Clone + + Copy + + Add + + Sub + + Neg + + SubAssign + + AddAssign + + From + + Into + + From + + PartialEq + + Eq + + PartialOrd + + Ord + + BorshDeserialize + + BorshSerialize + + BorshSchema, + PublicKey: Debug, +{ + balance_delta: TokenChange, + /// Changes of validators' bonds + bond_delta: HashMap, + /// Changes of validators' unbonds + unbond_delta: HashMap, + + /// Changes of all validator total deltas (up to `unbonding_epoch`) + total_deltas: HashMap, + /// Stake calculated from validator total deltas for each epoch + /// in which it has changed (the tuple of values are in pre and post state) + total_stake_by_epoch: + HashMap>, + /// Total voting power delta calculated from validators' total deltas + expected_total_voting_power_delta_by_epoch: + HashMap, + /// Changes of validators' voting power data + voting_power_by_epoch: HashMap>, + validator_set_pre: Option>, + validator_set_post: Option>, + total_voting_power_delta_by_epoch: HashMap, + new_validators: HashMap>, +} + +/// Accumulator of storage changes +impl Default + for Accumulator +where + Address: Display + + Debug + + Clone + + PartialEq + + Eq + + PartialOrd + + Ord + + Hash + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenAmount: Display + + Clone + + Copy + + Debug + + Default + + Eq + + Add + + Sub + + AddAssign + + SubAssign + + Into + + From + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenChange: Display + + Debug + + Default + + Clone + + Copy + + Add + + Sub + + Neg + + SubAssign + + AddAssign + + From + + Into + + From + + PartialEq + + Eq + + 
PartialOrd + + Ord + + BorshDeserialize + + BorshSerialize + + BorshSchema, + PublicKey: Debug, +{ + fn default() -> Self { + Self { + balance_delta: Default::default(), + bond_delta: Default::default(), + unbond_delta: Default::default(), + total_deltas: Default::default(), + total_stake_by_epoch: Default::default(), + expected_total_voting_power_delta_by_epoch: Default::default(), + voting_power_by_epoch: Default::default(), + validator_set_pre: Default::default(), + validator_set_post: Default::default(), + total_voting_power_delta_by_epoch: Default::default(), + new_validators: Default::default(), + } + } +} + +/// An empty local type to re-use trait bounds for the functions associated with +/// `Validate` in the `impl` below +struct Validate { + address: PhantomData
, + token_amount: PhantomData, + token_change: PhantomData, + public_key: PhantomData, +} + +impl + Validate +where + Address: Display + + Debug + + Clone + + PartialEq + + Eq + + PartialOrd + + Ord + + Hash + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenAmount: Display + + Clone + + Copy + + Debug + + Default + + Eq + + Add + + Sub + + AddAssign + + SubAssign + + Into + + From + + BorshDeserialize + + BorshSerialize + + BorshSchema, + TokenChange: Display + + Debug + + Default + + Clone + + Copy + + Add + + Sub + + Neg + + SubAssign + + AddAssign + + From + + Into + + From + + PartialEq + + Eq + + PartialOrd + + Ord + + BorshDeserialize + + BorshSerialize + + BorshSchema, + PublicKey: Debug + + Clone + + BorshDeserialize + + BorshSerialize + + BorshSchema + + PartialEq, +{ + fn accumulate_changes( + changes: Vec>, + params: &PosParams, + constants: &Constants, + errors: &mut Vec>, + ) -> Accumulator { + use DataUpdate::*; + use ValidatorUpdate::*; + + let mut accumulator = Accumulator::default(); + let Accumulator { + balance_delta, + bond_delta, + unbond_delta, + total_deltas, + total_stake_by_epoch, + expected_total_voting_power_delta_by_epoch, + voting_power_by_epoch, + validator_set_pre, + validator_set_post, + total_voting_power_delta_by_epoch, + new_validators, + } = &mut accumulator; + + for change in changes { + match change { + Validator { address, update } => match update { + State(data) => Self::validator_state( + constants, + errors, + new_validators, + address, + data, + ), + ConsensusKey(data) => Self::validator_consensus_key( + constants, + errors, + new_validators, + address, + data, + ), + StakingRewardAddress(data) => { + Self::validator_staking_reward_address( + errors, + new_validators, + address, + data, + ) + } + + TotalDeltas(data) => Self::validator_total_deltas( + constants, + errors, + total_deltas, + total_stake_by_epoch, + new_validators, + address, + data, + ), + VotingPowerUpdate(data) => Self::validator_voting_power( + params, + constants, + errors, + voting_power_by_epoch, + expected_total_voting_power_delta_by_epoch, + new_validators, + address, + data, + ), + }, + Balance(data) => Self::balance(errors, balance_delta, data), + Bond { id, data, slashes } => { + Self::bond(constants, errors, bond_delta, id, data, slashes) + } + Unbond { id, data, slashes } => Self::unbond( + constants, + errors, + unbond_delta, + id, + data, + slashes, + ), + ValidatorSet(data) => Self::validator_set( + constants, + errors, + validator_set_pre, + validator_set_post, + data, + ), + TotalVotingPower(data) => Self::total_voting_power( + constants, + errors, + total_voting_power_delta_by_epoch, + data, + ), + ValidatorAddressRawHash { raw_hash, data } => { + Self::validator_address_raw_hash( + errors, + new_validators, + raw_hash, + data, + ) + } + } + } + + accumulator + } + + fn validator_state( + constants: &Constants, + errors: &mut Vec>, + new_validators: &mut HashMap>, + address: Address, + data: Data, + ) { + match (data.pre, data.post) { + (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // Before pipeline epoch, the state must be `Pending` + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.pipeline_offset, + ) { + match post.get(epoch) { + Some(ValidatorState::Pending) => {} + _ => errors.push(Error::InvalidNewValidatorState( + epoch.into(), + )), + } + } + // At pipeline epoch, the state must be `Candidate` + match post.get(constants.pipeline_epoch) { + 
Some(ValidatorState::Candidate) => {} + _ => errors.push(Error::InvalidNewValidatorState( + constants.pipeline_epoch.into(), + )), + } + // Add the validator to the accumulator + let validator = new_validators.entry(address).or_default(); + validator.has_state = true; + } + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + use ValidatorState::*; + // Before pipeline epoch, the only allowed state change + // is from `Inactive` to `Pending` + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.pipeline_offset, + ) { + match (pre.get(epoch), post.get(epoch)) { + (Some(Inactive), Some(Pending)) => {} + (Some(state_pre), Some(state_post)) + if state_pre == state_post => {} + _ => errors.push(Error::InvalidValidatorStateUpdate( + epoch.into(), + )), + } + } + // Check allowed state changes at pipeline epoch + match ( + pre.get(constants.pipeline_epoch), + post.get(constants.pipeline_epoch), + ) { + (Some(Pending), Some(Candidate) | Some(Inactive)) + | (Some(Candidate), Some(Inactive)) + | (Some(Inactive), Some(Candidate) | Some(Pending)) => {} + _ => errors.push(Error::InvalidNewValidatorState( + constants.pipeline_epoch.into(), + )), + } + } + (Some(_), None) => { + errors.push(Error::ValidatorStateIsRequired(address)) + } + (None, None) => {} + } + } + + fn validator_consensus_key( + constants: &Constants, + errors: &mut Vec>, + new_validators: &mut HashMap>, + address: Address, + data: Data>, + ) { + match (data.pre, data.post) { + (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // The value must be known at pipeline epoch + match post.get(constants.pipeline_epoch) { + Some(consensus_key) => { + let validator = + new_validators.entry(address).or_default(); + validator.has_consensus_key = + Some(consensus_key.clone()); + } + _ => errors.push(Error::MissingNewValidatorConsensusKey( + constants.pipeline_epoch.into(), + )), + } + } + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // Before pipeline epoch, the key must not change + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.pipeline_offset, + ) { + match (pre.get(epoch), post.get(epoch)) { + (Some(key_pre), Some(key_post)) + if key_pre == key_post => + { + continue; + } + _ => errors.push( + Error::InvalidValidatorConsensusKeyUpdate( + epoch.into(), + ), + ), + } + } + } + (Some(_), None) => { + errors.push(Error::ValidatorStateIsRequired(address)) + } + (None, None) => {} + } + } + + fn validator_staking_reward_address( + errors: &mut Vec>, + new_validators: &mut HashMap>, + address: Address, + data: Data
, + ) { + match (data.pre, data.post) { + (Some(_), Some(post)) => { + if post == address { + errors + .push(Error::StakingRewardAddressEqValidator(address)); + } + } + (None, Some(post)) => { + if post == address { + errors.push(Error::StakingRewardAddressEqValidator( + address.clone(), + )); + } + let validator = new_validators.entry(address).or_default(); + validator.has_staking_reward_address = true; + } + _ => errors.push(Error::StakingRewardAddressIsRequired(address)), + } + } + + fn validator_total_deltas( + constants: &Constants, + errors: &mut Vec>, + total_deltas: &mut HashMap, + total_stake_by_epoch: &mut HashMap< + Epoch, + HashMap, + >, + new_validators: &mut HashMap>, + address: Address, + data: Data>, + ) { + match (data.pre, data.post) { + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // Changes of all total deltas (up to `unbonding_epoch`) + let mut deltas = TokenChange::default(); + // Sum of pre total deltas + let mut pre_deltas_sum = TokenChange::default(); + // Sum of post total deltas + let mut post_deltas_sum = TokenChange::default(); + // Iter from the first epoch to the last epoch of `post` + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.unbonding_offset + 1, + ) { + // Changes of all total deltas (up to + // `unbonding_epoch`) + let mut delta = TokenChange::default(); + // Find the delta in `pre` + if let Some(change) = { + if epoch == constants.current_epoch { + // On the first epoch, we have to get the + // sum of all deltas at and before that + // epoch as the `pre` could have been set in + // an older epoch + pre.get(epoch) + } else { + pre.get_delta_at_epoch(epoch).copied() + } + } { + delta -= change; + pre_deltas_sum += change; + } + // Find the delta in `post` + if let Some(change) = post.get_delta_at_epoch(epoch) { + delta += *change; + post_deltas_sum += *change; + let stake_pre: i128 = Into::into(pre_deltas_sum); + let stake_post: i128 = Into::into(post_deltas_sum); + match ( + u64::try_from(stake_pre), + u64::try_from(stake_post), + ) { + (Ok(stake_pre), Ok(stake_post)) => { + let stake_pre = TokenAmount::from(stake_pre); + let stake_post = TokenAmount::from(stake_post); + total_stake_by_epoch + .entry(epoch) + .or_insert_with(HashMap::default) + .insert( + address.clone(), + (stake_pre, stake_post), + ); + } + _ => { + errors.push(Error::InvalidValidatorTotalDeltas( + address.clone(), + stake_post, + )) + } + } + } + deltas += delta; + // A total delta can only be increased at + // `pipeline_offset` from bonds and decreased at + // `unbonding_offset` from unbonding + if delta > TokenChange::default() + && epoch != constants.pipeline_epoch + { + errors.push(Error::EpochedDataWrongEpoch { + got: epoch.into(), + expected: vec![constants.pipeline_epoch.into()], + }) + } + if delta < TokenChange::default() + && epoch != constants.unbonding_epoch + { + errors.push(Error::EpochedDataWrongEpoch { + got: epoch.into(), + expected: vec![constants.unbonding_epoch.into()], + }) + } + } + if post_deltas_sum < TokenChange::default() { + errors.push(Error::NegativeValidatorTotalDeltasSum( + address.clone(), + )) + } + if deltas != TokenChange::default() { + let deltas_entry = total_deltas.entry(address).or_default(); + *deltas_entry += deltas; + } + } + (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // Changes of all total deltas (up to `unbonding_epoch`) + let mut deltas = 
TokenChange::default(); + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.unbonding_offset + 1, + ) { + if let Some(change) = post.get_delta_at_epoch(epoch) { + // A new total delta can only be initialized + // at `pipeline_offset` (from bonds) and updated + // at `unbonding_offset` (from unbonding) + if epoch != constants.pipeline_epoch + && epoch != constants.unbonding_epoch + { + errors.push(Error::EpochedDataWrongEpoch { + got: epoch.into(), + expected: vec![constants.pipeline_epoch.into()], + }) + } + deltas += *change; + let stake: i128 = Into::into(deltas); + match u64::try_from(stake) { + Ok(stake) => { + let stake = TokenAmount::from(stake); + total_stake_by_epoch + .entry(epoch) + .or_insert_with(HashMap::default) + .insert(address.clone(), (0.into(), stake)); + } + Err(_) => { + errors.push(Error::InvalidValidatorTotalDeltas( + address.clone(), + stake, + )) + } + } + } + } + if deltas < TokenChange::default() { + errors.push(Error::NegativeValidatorTotalDeltasSum( + address.clone(), + )) + } + if deltas != TokenChange::default() { + let deltas_entry = + total_deltas.entry(address.clone()).or_default(); + *deltas_entry += deltas; + } + let validator = new_validators.entry(address).or_default(); + validator.has_total_deltas = true; + } + (Some(_), None) => { + errors.push(Error::MissingValidatorTotalDeltas(address)) + } + (None, None) => {} + } + } + + #[allow(clippy::too_many_arguments)] + fn validator_voting_power( + params: &PosParams, + constants: &Constants, + errors: &mut Vec>, + voting_power_by_epoch: &mut HashMap< + Epoch, + HashMap, + >, + expected_total_voting_power_delta_by_epoch: &mut HashMap< + Epoch, + VotingPowerDelta, + >, + new_validators: &mut HashMap>, + address: Address, + data: Data, + ) { + match (&data.pre, data.post) { + (Some(_), Some(post)) | (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + let mut voting_power = VotingPowerDelta::default(); + // Iter from the current epoch to the last epoch of + // `post` + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.unbonding_offset + 1, + ) { + if let Some(delta_post) = post.get_delta_at_epoch(epoch) { + voting_power += *delta_post; + + // If the delta is not the same as in pre-state, + // accumulate the expected total voting power + // change + let delta_pre = data + .pre + .as_ref() + .and_then(|data| { + if epoch == constants.current_epoch { + // On the first epoch, we have to + // get the sum of all deltas at and + // before that epoch as the `pre` + // could have been set in an older + // epoch + data.get(epoch) + } else { + data.get_delta_at_epoch(epoch).copied() + } + }) + .unwrap_or_default(); + if delta_pre != *delta_post { + let current_delta = + expected_total_voting_power_delta_by_epoch + .entry(epoch) + .or_insert_with(Default::default); + *current_delta += *delta_post - delta_pre; + } + + let vp: i64 = Into::into(voting_power); + match u64::try_from(vp) { + Ok(vp) => { + let vp = VotingPower::from(vp); + voting_power_by_epoch + .entry(epoch) + .or_insert_with(HashMap::default) + .insert(address.clone(), vp); + } + Err(_) => { + errors.push(Error::InvalidValidatorVotingPower( + address.clone(), + vp, + )) + } + } + } + } + if data.pre.is_none() { + let validator = new_validators.entry(address).or_default(); + validator.has_voting_power = true; + validator.voting_power = post + .get_at_offset( + constants.current_epoch, + DynEpochOffset::PipelineLen, + params, + ) + 
.unwrap_or_default() + .try_into() + .unwrap_or_default() + } + } + (Some(_), None) => { + errors.push(Error::MissingValidatorVotingPower(address)) + } + (None, None) => {} + } + } + + fn balance( + errors: &mut Vec>, + balance_delta: &mut TokenChange, + data: Data, + ) { + match (data.pre, data.post) { + (None, Some(post)) => *balance_delta += TokenChange::from(post), + (Some(pre), Some(post)) => { + *balance_delta += + TokenChange::from(post) - TokenChange::from(pre); + } + (Some(_), None) => errors.push(Error::MissingBalance), + (None, None) => {} + } + } + + fn bond( + constants: &Constants, + errors: &mut Vec>, + bond_delta: &mut HashMap, + id: BondId
, + data: Data>, + slashes: Vec, + ) { + match (data.pre, data.post) { + // Bond may be updated from newly bonded tokens and unbonding + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + let pre_offset: u64 = match constants + .current_epoch + .checked_sub(pre.last_update()) + { + Some(offset) => offset.into(), + None => { + // If the last_update > current_epoch, the check + // above must have failed with + // `Error::InvalidLastUpdate` + return; + } + }; + + // Pre-bonds keyed by their `start_epoch` + let mut pre_bonds: HashMap = + HashMap::default(); + // We have to slash only the difference between post and + // pre, not both pre and post to avoid rounding errors + let mut slashed_deltas: HashMap = + HashMap::default(); + let mut neg_deltas: HashMap = + Default::default(); + // Iter from the first epoch of `pre` to the last epoch of + // `post` + for epoch in Epoch::iter_range( + pre.last_update(), + pre_offset + constants.unbonding_offset + 1, + ) { + if let Some(bond) = pre.get_delta_at_epoch(epoch) { + for (start_epoch, delta) in bond.pos_deltas.iter() { + let delta = TokenChange::from(*delta); + slashed_deltas.insert(*start_epoch, -delta); + pre_bonds.insert(*start_epoch, delta); + } + let ins_epoch = if epoch <= constants.current_epoch { + constants.current_epoch + } else { + epoch + }; + let entry = neg_deltas.entry(ins_epoch).or_default(); + *entry -= TokenChange::from(bond.neg_deltas); + } + if let Some(bond) = post.get_delta_at_epoch(epoch) { + for (start_epoch, delta) in bond.pos_deltas.iter() { + // An empty bond must be deleted + if *delta == TokenAmount::default() { + errors.push(Error::EmptyBond(id.clone())) + } + // On the current epoch, all bond's + // `start_epoch`s must be equal or lower than + // `current_epoch`. For all others, the + // `start_epoch` must be equal + // to the `epoch` at which it's set. + if (epoch == constants.current_epoch + && *start_epoch > constants.current_epoch) + || (epoch != constants.current_epoch + && *start_epoch != epoch) + { + errors.push(Error::InvalidBondStartEpoch { + id: id.clone(), + got: (*start_epoch).into(), + expected: epoch.into(), + }) + } + let delta = TokenChange::from(*delta); + match slashed_deltas.get_mut(start_epoch) { + Some(pre_delta) => { + if *pre_delta + delta == 0_i128.into() { + slashed_deltas.remove(start_epoch); + } else { + *pre_delta += delta; + } + } + None => { + slashed_deltas.insert(*start_epoch, delta); + } + } + + // Anywhere other than at `pipeline_offset` + // where new bonds are added, check against the + // data in `pre_bonds` to ensure that no new + // bond has been added and that the deltas are + // equal or lower to `pre_bonds` deltas. + // Note that any bonds from any epoch can be + // unbonded, even if they are not yet active. 
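// [editorial sketch, not part of the patch] The invariant enforced by the
// block below, reduced to plain maps keyed by `start_epoch`: outside of the
// pipeline epoch a positive bond delta must reproduce the delta already in
// the pre-state, and a brand-new entry is rejected (in this refactor,
// unbonding is reflected in `neg_deltas`, not by shrinking these entries).
// The types and names here are illustrative stand-ins, not the crate's own.
fn check_outside_pipeline_epoch(
    start_epoch: u64,
    post_delta: i128,
    pre_bonds: &std::collections::HashMap<u64, i128>,
) -> Result<(), &'static str> {
    match pre_bonds.get(&start_epoch) {
        // An unchanged positive delta passes the check
        Some(pre_delta) if *pre_delta == post_delta => Ok(()),
        Some(_) => Err("bond delta changed outside the pipeline epoch"),
        None => Err("new bond added outside the pipeline epoch"),
    }
}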
+ if epoch != constants.pipeline_epoch { + match pre_bonds.get(start_epoch) { + Some(pre_delta) => { + if &delta != pre_delta { + errors.push( + Error::InvalidNewBondEpoch { + id: id.clone(), + got: epoch.into(), + expected: constants + .pipeline_epoch + .into(), + }, + ); + } + } + None => { + errors.push( + Error::InvalidNewBondEpoch { + id: id.clone(), + got: epoch.into(), + expected: (constants + .current_epoch + + constants + .pipeline_offset) + .into(), + }, + ); + } + } + } + } + if epoch != constants.unbonding_epoch { + match neg_deltas.get(&epoch) { + Some(deltas) => { + if -*deltas + != TokenChange::from(bond.neg_deltas) + { + errors.push( + Error::InvalidNegDeltaEpoch { + id: id.clone(), + got: epoch.into(), + expected: constants + .unbonding_epoch + .into(), + }, + ) + } + } + None => { + if bond.neg_deltas != 0.into() { + errors.push( + Error::InvalidNegDeltaEpoch { + id: id.clone(), + got: epoch.into(), + expected: constants + .unbonding_epoch + .into(), + }, + ) + } + } + } + } + let entry = neg_deltas.entry(epoch).or_default(); + *entry += TokenChange::from(bond.neg_deltas); + } + } + // Check slashes + for (start_epoch, delta) in slashed_deltas.iter_mut() { + for slash in &slashes { + if slash.epoch >= *start_epoch { + let raw_delta: i128 = (*delta).into(); + let current_slashed = + TokenChange::from(slash.rate * raw_delta); + *delta -= current_slashed; + } + } + } + let total = slashed_deltas + .values() + .fold(TokenChange::default(), |acc, delta| acc + *delta) + - neg_deltas + .values() + .fold(TokenChange::default(), |acc, delta| { + acc + *delta + }); + + if total != TokenChange::default() { + let bond_entry = + bond_delta.entry(id.validator).or_default(); + *bond_entry += total; + } + } + // Bond may be created from newly bonded tokens only + (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + let mut total_delta = TokenChange::default(); + for epoch in Epoch::iter_range( + constants.current_epoch, + constants.unbonding_offset + 1, + ) { + if let Some(bond) = post.get_delta_at_epoch(epoch) { + // A new bond must be initialized at + // `pipeline_offset` + if epoch != constants.pipeline_epoch + && !bond.pos_deltas.is_empty() + { + dbg!(&bond.pos_deltas); + errors.push(Error::EpochedDataWrongEpoch { + got: epoch.into(), + expected: vec![constants.pipeline_epoch.into()], + }) + } + if epoch != constants.unbonding_epoch + && bond.neg_deltas != 0.into() + { + errors.push(Error::InvalidNegDeltaEpoch { + id: id.clone(), + got: epoch.into(), + expected: constants.unbonding_epoch.into(), + }) + } + for (start_epoch, delta) in bond.pos_deltas.iter() { + if *start_epoch != epoch { + errors.push(Error::InvalidBondStartEpoch { + id: id.clone(), + got: (*start_epoch).into(), + expected: epoch.into(), + }) + } + let mut delta = *delta; + // Check slashes + for slash in &slashes { + if slash.epoch >= *start_epoch { + let raw_delta: u64 = delta.into(); + let current_slashed = TokenAmount::from( + slash.rate * raw_delta, + ); + delta -= current_slashed; + } + } + let delta = TokenChange::from(delta); + total_delta += delta + } + total_delta -= TokenChange::from(bond.neg_deltas) + } + } + // An empty bond must be deleted + if total_delta == TokenChange::default() { + errors.push(Error::EmptyBond(id.clone())) + } + let bond_entry = bond_delta.entry(id.validator).or_default(); + *bond_entry += total_delta; + } + // Bond may be deleted when all the tokens are unbonded + (Some(pre), None) => { + let mut total_delta = 
TokenChange::default(); + for index in 0..constants.pipeline_offset + 1 { + let index = index as usize; + let epoch = pre.last_update() + index; + if let Some(bond) = pre.get_delta_at_epoch(epoch) { + for (start_epoch, delta) in &bond.pos_deltas { + let mut delta = *delta; + // Check slashes + for slash in &slashes { + if slash.epoch >= *start_epoch { + let raw_delta: u64 = delta.into(); + let current_slashed = TokenAmount::from( + slash.rate * raw_delta, + ); + delta -= current_slashed; + } + } + let delta = TokenChange::from(delta); + total_delta -= delta + } + total_delta += TokenChange::from(bond.neg_deltas) + } + } + let bond_entry = bond_delta.entry(id.validator).or_default(); + *bond_entry += total_delta; + } + (None, None) => {} + } + } + + fn unbond( + constants: &Constants, + errors: &mut Vec>, + unbond_delta: &mut HashMap, + id: BondId
, + data: Data>, + slashes: Vec, + ) { + match (data.pre, data.post) { + // Unbond may be updated from newly unbonded tokens + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + let pre_offset: u64 = match constants + .current_epoch + .checked_sub(pre.last_update()) + { + Some(offset) => offset.into(), + None => { + // If the last_update > current_epoch, the check + // above must have failed with + // `Error::InvalidLastUpdate` + return; + } + }; + + // We have to slash only the difference between post and + // pre, not both pre and post to avoid rounding errors + let mut slashed_deltas: HashMap<(Epoch, Epoch), TokenChange> = + HashMap::default(); + // Iter from the first epoch of `pre` to the last epoch of + // `post` + for epoch in Epoch::iter_range( + pre.last_update(), + pre_offset + constants.unbonding_offset + 1, + ) { + if let Some(unbond) = pre.get_delta_at_epoch(epoch) { + for ((start_epoch, end_epoch), delta) in + unbond.deltas.iter() + { + let delta = TokenChange::from(*delta); + slashed_deltas + .insert((*start_epoch, *end_epoch), -delta); + } + } + if let Some(unbond) = post.get_delta_at_epoch(epoch) { + for ((start_epoch, end_epoch), delta) in + unbond.deltas.iter() + { + let delta = TokenChange::from(*delta); + let key = (*start_epoch, *end_epoch); + match slashed_deltas.get_mut(&key) { + Some(pre_delta) => { + if *pre_delta + delta == 0_i128.into() { + slashed_deltas.remove(&key); + } else { + *pre_delta += delta; + } + } + None => { + slashed_deltas.insert(key, delta); + } + } + } + } + } + // Check slashes + for ((start_epoch, end_epoch), delta) in + slashed_deltas.iter_mut() + { + for slash in &slashes { + if slash.epoch >= *start_epoch + && slash.epoch <= *end_epoch + { + let raw_delta: i128 = (*delta).into(); + let current_slashed = + TokenChange::from(slash.rate * raw_delta); + *delta -= current_slashed; + } + } + } + let total = slashed_deltas + .values() + .fold(TokenChange::default(), |acc, delta| acc + *delta); + if total != TokenChange::default() { + let unbond_entry = + unbond_delta.entry(id.validator).or_default(); + *unbond_entry += total; + } + } + // Unbond may be created from a bond + (None, Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + let mut total_delta = TokenChange::default(); + for epoch in Epoch::iter_range( + post.last_update(), + constants.unbonding_offset + 1, + ) { + if let Some(unbond) = post.get_delta_at_epoch(epoch) { + for ((start_epoch, end_epoch), delta) in + unbond.deltas.iter() + { + let mut delta = *delta; + // Check and apply slashes, if any + for slash in &slashes { + if slash.epoch >= *start_epoch + && slash.epoch <= *end_epoch + { + let raw_delta: u64 = delta.into(); + let current_slashed = TokenAmount::from( + slash.rate * raw_delta, + ); + delta -= current_slashed; + } + } + let delta = TokenChange::from(delta); + total_delta += delta; + } + } + } + let unbond_entry = + unbond_delta.entry(id.validator).or_default(); + *unbond_entry += total_delta; + } + // Unbond may be deleted when all the tokens are withdrawn + (Some(pre), None) => { + let mut total_delta = TokenChange::default(); + for epoch in Epoch::iter_range( + pre.last_update(), + constants.unbonding_offset + 1, + ) { + if let Some(unbond) = pre.get_delta_at_epoch(epoch) { + for ((start_epoch, end_epoch), delta) in + unbond.deltas.iter() + { + let mut delta = *delta; + // Check and apply slashes, if any + for slash in &slashes { 
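// [editorial note, not part of the patch] Each unbond entry is keyed by the
// (start_epoch, end_epoch) range of the bond it came from; a recorded slash
// applies only if its epoch falls inside that range, and the deduction is
// `slash.rate * delta` (e.g. a 5% slash on a delta of 1_000 tokens removes
// 50) before the remaining delta is folded into `total_delta`.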
+ if slash.epoch >= *start_epoch + && slash.epoch <= *end_epoch + { + let raw_delta: u64 = delta.into(); + let current_slashed = TokenAmount::from( + slash.rate * raw_delta, + ); + delta -= current_slashed; + } + } + let delta = TokenChange::from(delta); + total_delta -= delta; + } + } + } + let unbond_entry = + unbond_delta.entry(id.validator).or_default(); + *unbond_entry += total_delta; + } + (None, None) => {} + } + } + + fn validator_set( + constants: &Constants, + errors: &mut Vec>, + validator_set_pre: &mut Option>, + validator_set_post: &mut Option>, + data: Data>, + ) { + match (data.pre, data.post) { + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + *validator_set_pre = Some(pre); + *validator_set_post = Some(post); + } + _ => errors.push(Error::MissingValidatorSet), + } + } + + fn total_voting_power( + constants: &Constants, + errors: &mut Vec>, + total_voting_power_delta_by_epoch: &mut HashMap< + Epoch, + VotingPowerDelta, + >, + data: Data, + ) { + match (data.pre, data.post) { + (Some(pre), Some(post)) => { + if post.last_update() != constants.current_epoch { + errors.push(Error::InvalidLastUpdate) + } + // Iter from the first epoch to the last epoch of `post` + for epoch in Epoch::iter_range( + post.last_update(), + constants.unbonding_offset + 1, + ) { + // Find the delta in `pre` + let delta_pre = (if epoch == post.last_update() { + // On the first epoch, we have to get the + // sum of all deltas at and before that + // epoch as the `pre` could have been set in + // an older epoch + pre.get(epoch) + } else { + pre.get_delta_at_epoch(epoch).copied() + }) + .unwrap_or_default(); + // Find the delta in `post` + let delta_post = post + .get_delta_at_epoch(epoch) + .copied() + .unwrap_or_default(); + if delta_pre != delta_post { + total_voting_power_delta_by_epoch + .insert(epoch, delta_post - delta_pre); + } + } + } + _ => errors.push(Error::MissingTotalVotingPower), + } + } + + fn validator_address_raw_hash( + errors: &mut Vec>, + new_validators: &mut HashMap>, + raw_hash: String, + data: Data
, + ) { + match (data.pre, data.post) { + (None, Some(address)) => { + let validator = new_validators.entry(address).or_default(); + validator.has_address_raw_hash = Some(raw_hash); + } + (pre, post) if pre != post => { + errors.push(Error::InvalidRawHashUpdate) + } + _ => {} + } + } +} diff --git a/proto/services.proto b/proto/services.proto deleted file mode 100644 index 9f16dddb91..0000000000 --- a/proto/services.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; - -package services; - -import "types.proto"; - -service RPCService { - rpc SendMessage(RpcMessage) returns (RpcResponse); -} - -message IntentMessage{ - types.Intent intent = 1; - string topic = 2; -} - -message SubscribeTopicMessage{ - string topic = 2; -} - -message RpcMessage { - oneof message { - IntentMessage intent = 1; - SubscribeTopicMessage topic = 2; - types.Dkg dkg = 3; - } -} - -message RpcResponse { - string result = 1; -} diff --git a/proto/types.proto b/proto/types.proto index b4a0162b50..58494ec824 100644 --- a/proto/types.proto +++ b/proto/types.proto @@ -11,24 +11,8 @@ message Tx { google.protobuf.Timestamp timestamp = 3; } -message Intent { - bytes data = 1; - google.protobuf.Timestamp timestamp = 2; -} - -message IntentGossipMessage{ - // TODO remove oneof because it's not used so far - oneof msg { - Intent intent = 1; - } -} - -message Dkg { - string data = 1; -} +message Dkg { string data = 1; } -message DkgGossipMessage{ - oneof dkg_message { - Dkg dkg = 1; - } +message DkgGossipMessage { + oneof dkg_message { Dkg dkg = 1; } } diff --git a/release.toml b/release.toml index e82ec3b4c3..632e36bc02 100644 --- a/release.toml +++ b/release.toml @@ -1,10 +1,7 @@ allow-branch = ["main", "maint-*"] consolidate-commits = true -disable-push = true -disable-publish = true -disable-tag = true -no-dev-version = true pre-release-commit-message = "Namada {{version}}" +publish = false +push = false shared-version = true -sign-tag = true -tag-message = "Namada {{version}}" +tag = false diff --git a/scripts/release.sh b/scripts/release.sh new file mode 100755 index 0000000000..b32ff70764 --- /dev/null +++ b/scripts/release.sh @@ -0,0 +1,59 @@ +#!/bin/sh +set -e + +if [ -z "$1" ]; then + echo "please specify a version to release" + exit 1 +fi + +REPO_ROOT=$(git rev-parse --show-toplevel) + +if [ "$REPO_ROOT" != "$PWD" ]; then + echo "please run from repository root" + exit 1 +fi + +VERSION="$1" +TAG_NAME="v$1" + +# start from a clean build +git clean -fxd + +# update the main workspace crate versions (1 commit) +HASH_BEFORE=$(git rev-parse HEAD) +cargo release --execute $VERSION +HASH_AFTER=$(git rev-parse HEAD) + +# update the wasm crate versions (2 fixups) +cd $REPO_ROOT/wasm +cargo update -w +git add Cargo.lock +git commit --fixup=$HASH_AFTER +cargo release --execute $VERSION + +# update the wasm_for_tests crate version, and rebuild them (3 fixups) +cd $REPO_ROOT/wasm_for_tests/wasm_source +cargo update -w +git add Cargo.lock +git commit --fixup=$HASH_AFTER +cargo release --execute $VERSION +make all +git add ../*.wasm +git commit --fixup=$HASH_AFTER + +# build the wasm checksums (1 fixup) +cd $REPO_ROOT +make build-wasm-scripts-docker +git add wasm/checksums.json +git commit --fixup=$HASH_AFTER + +# update the changelog (1 fixup) +unclog release --version $TAG_NAME +unclog build > CHANGELOG.md +git add .changelog CHANGELOG.md +git commit --fixup=$HASH_AFTER + +# show the user the result +git rebase --interactive --autosquash --keep-base $HASH_BEFORE + +echo "final $TAG_NAME commit ready for testing" diff --git 
a/shared/Cargo.toml b/shared/Cargo.toml index c16713d30c..9b5c65414e 100644 --- a/shared/Cargo.toml +++ b/shared/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "namada" resolver = "2" -version = "0.7.1" +version = "0.8.1" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -70,7 +70,8 @@ ark-bls12-381 = {version = "0.3"} ark-ec = {version = "0.3", optional = true} ark-serialize = "0.3" # We switch off "blake2b" because it cannot be compiled to wasm -arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", branch = "bat/arse-merkle-tree", default-features = false, features = ["std", "borsh"]} +# branch = "bat/arse-merkle-tree" +arse-merkle-tree = {package = "sparse-merkle-tree", git = "https://github.com/heliaxdev/sparse-merkle-tree", rev = "04ad1eeb28901b57a7599bbe433b3822965dabe8", default-features = false, features = ["std", "borsh"]} bech32 = "0.8.0" borsh = "0.9.0" chrono = {version = "0.4.22", default-features = false, features = ["clock", "std"]} @@ -92,7 +93,7 @@ ibc-proto-abcipp = {package = "ibc-proto", git = "https://github.com/heliaxdev/i ibc = {version = "0.14.0", default-features = false, optional = true} ibc-proto = {version = "0.17.1", default-features = false, optional = true} ics23 = "0.7.0" -itertools = "0.10.3" +itertools = "0.10.0" loupe = {version = "0.1.3", optional = true} libsecp256k1 = {git = "https://github.com/heliaxdev/libsecp256k1", rev = "bbb3bd44a49db361f21d9db80f9a087c194c0ae9", default-features = false, features = ["std", "static-context"]} parity-wasm = {version = "0.42.2", optional = true} diff --git a/shared/build.rs b/shared/build.rs index 3c5ffb41f1..74dd72b753 100644 --- a/shared/build.rs +++ b/shared/build.rs @@ -58,9 +58,6 @@ fn main() { tonic_build::configure() .out_dir("src/proto/generated") .format(use_rustfmt) - // TODO try to add json encoding to simplify use for user - // .type_attribute("types.Intent", "#[derive(serde::Serialize, - // serde::Deserialize)]") .protoc_arg("--experimental_allow_proto3_optional") .compile(&[format!("{}/types.proto", PROTO_SRC)], &[PROTO_SRC]) .unwrap(); diff --git a/shared/release.toml b/shared/release.toml deleted file mode 100644 index 70e74bcd73..0000000000 --- a/shared/release.toml +++ /dev/null @@ -1,2 +0,0 @@ -disable-tag = false -tag-name = "v{{version}}" diff --git a/shared/src/ledger/eth_bridge/bridge_pool_vp.rs b/shared/src/ledger/eth_bridge/bridge_pool_vp.rs index 3588d76734..a765a423af 100644 --- a/shared/src/ledger/eth_bridge/bridge_pool_vp.rs +++ b/shared/src/ledger/eth_bridge/bridge_pool_vp.rs @@ -269,12 +269,17 @@ mod test_bridge_pool_vp { tx: &'a Tx, storage: &'a Storage, write_log: &'a WriteLog, + keys_changed: &'a BTreeSet, + verifiers: &'a BTreeSet
, ) -> Ctx<'a, MockDB, Sha256Hasher, WasmCacheRwAccess> { Ctx::new( + &BRIDGE_POOL_ADDRESS, storage, write_log, tx, VpGasMeter::new(0u64), + keys_changed, + verifiers, VpCache::new(temp_dir(), 100usize), ) } @@ -349,11 +354,18 @@ mod test_bridge_pool_vp { .expect("Test failed"); // add transfer to pool + let verifiers = BTreeSet::new(); let keys_changed = insert_transfer(transfer.clone(), &mut write_log); // create the data to be given to the vp let vp = BridgePoolVp { - ctx: setup_ctx(&tx, &storage, &write_log), + ctx: setup_ctx( + &tx, + &storage, + &write_log, + &keys_changed, + &verifiers, + ), }; let to_sign = transfer.try_to_vec().expect("Test failed"); @@ -592,14 +604,21 @@ mod test_bridge_pool_vp { ) .expect("Test failed"); - // create the data to be given to the vp - let vp = BridgePoolVp { - ctx: setup_ctx(&tx, &storage, &write_log), - }; // inform the vp that the merkle root changed let keys_changed = BTreeSet::default(); let verifiers = BTreeSet::default(); + // create the data to be given to the vp + let vp = BridgePoolVp { + ctx: setup_ctx( + &tx, + &storage, + &write_log, + &keys_changed, + &verifiers, + ), + }; + let to_sign = transfer.try_to_vec().expect("Test failed"); let sig = common::SigScheme::sign(&bertha_keypair(), &to_sign); let signed = SignedTxData { diff --git a/shared/src/ledger/gas.rs b/shared/src/ledger/gas.rs index 739885971e..c7da7b132c 100644 --- a/shared/src/ledger/gas.rs +++ b/shared/src/ledger/gas.rs @@ -10,7 +10,7 @@ use thiserror::Error; #[derive(Error, Debug, Clone, PartialEq, Eq)] pub enum Error { #[error("Transaction gas limit exceeded")] - TransactionGasExceedededError, + TransactionGasExceededError, #[error("Block gas limit exceeded")] BlockGasExceeded, #[error("Overflow during gas operations")] @@ -69,7 +69,7 @@ impl BlockGasMeter { .ok_or(Error::GasOverflow)?; if self.transaction_gas > TRANSACTION_GAS_LIMIT { - return Err(Error::TransactionGasExceedededError); + return Err(Error::TransactionGasExceededError); } Ok(()) } @@ -148,7 +148,7 @@ impl VpGasMeter { .ok_or(Error::GasOverflow)?; if current_total > TRANSACTION_GAS_LIMIT { - return Err(Error::TransactionGasExceedededError); + return Err(Error::TransactionGasExceededError); } Ok(()) } @@ -258,7 +258,7 @@ mod tests { meter .add(TRANSACTION_GAS_LIMIT) .expect_err("unexpectedly succeeded"), - Error::TransactionGasExceedededError + Error::TransactionGasExceededError ); } @@ -279,7 +279,7 @@ mod tests { meter .add(TRANSACTION_GAS_LIMIT + 1) .expect_err("unexpectedly succeeded"), - Error::TransactionGasExceedededError + Error::TransactionGasExceededError ); } diff --git a/shared/src/ledger/governance/parameters.rs b/shared/src/ledger/governance/parameters.rs index f860242a74..68c0744ff5 100644 --- a/shared/src/ledger/governance/parameters.rs +++ b/shared/src/ledger/governance/parameters.rs @@ -1,3 +1,5 @@ +use std::fmt::Display; + use borsh::{BorshDeserialize, BorshSerialize}; use super::storage as gov_storage; @@ -24,18 +26,38 @@ pub struct GovParams { pub max_proposal_code_size: u64, /// Minimum proposal voting period in epochs pub min_proposal_period: u64, - /// Maximimum number of characters for proposal content + /// Maximum proposal voting period in epochs + pub max_proposal_period: u64, + /// Maximum number of characters for proposal content pub max_proposal_content_size: u64, /// Minimum epochs between end and grace epochs pub min_proposal_grace_epochs: u64, } +impl Display for GovParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Min. 
proposal fund: {}\nMax. proposal code size: {}\nMin. \ + proposal period: {}\nMax. proposal period: {}\nMax. proposal \ + content size: {}\nMin. proposal grace epochs: {}", + self.min_proposal_fund, + self.max_proposal_code_size, + self.min_proposal_period, + self.max_proposal_period, + self.max_proposal_content_size, + self.min_proposal_grace_epochs + ) + } +} + impl Default for GovParams { fn default() -> Self { Self { min_proposal_fund: 500, max_proposal_code_size: 300_000, min_proposal_period: 3, + max_proposal_period: 27, max_proposal_content_size: 10_000, min_proposal_grace_epochs: 6, } @@ -53,6 +75,7 @@ impl GovParams { min_proposal_fund, max_proposal_code_size, min_proposal_period, + max_proposal_period, max_proposal_content_size, min_proposal_grace_epochs, } = self; @@ -75,6 +98,12 @@ impl GovParams { .write(&min_proposal_period_key, encode(min_proposal_period)) .unwrap(); + let max_proposal_period_key = + gov_storage::get_max_proposal_period_key(); + storage + .write(&max_proposal_period_key, encode(max_proposal_period)) + .unwrap(); + let max_proposal_content_size_key = gov_storage::get_max_proposal_content_key(); storage diff --git a/shared/src/ledger/governance/storage.rs b/shared/src/ledger/governance/storage.rs index 50e9dc05cb..9d2f0a4e4a 100644 --- a/shared/src/ledger/governance/storage.rs +++ b/shared/src/ledger/governance/storage.rs @@ -16,6 +16,7 @@ const PROPOSAL_COMMITTING_EPOCH: &str = "epoch"; const MIN_PROPOSAL_FUND_KEY: &str = "min_fund"; const MAX_PROPOSAL_CODE_SIZE_KEY: &str = "max_code_size"; const MIN_PROPOSAL_PERIOD_KEY: &str = "min_period"; +const MAX_PROPOSAL_PERIOD_KEY: &str = "max_period"; const MAX_PROPOSAL_CONTENT_SIZE_KEY: &str = "max_content"; const MIN_GRACE_EPOCH_KEY: &str = "min_grace_epoch"; const COUNTER_KEY: &str = "counter"; @@ -242,6 +243,21 @@ pub fn is_min_proposal_period_key(key: &Key) -> bool { } } +/// Check if key is a max proposal period param key +pub fn is_max_proposal_period_key(key: &Key) -> bool { + match &key.segments[..] { + [ + DbKeySeg::AddressSeg(addr), + DbKeySeg::StringSeg(max_proposal_period_param), + ] if addr == &ADDRESS + && max_proposal_period_param == MAX_PROPOSAL_PERIOD_KEY => + { + true + } + _ => false, + } +} + /// Check if key is a min grace epoch key pub fn is_commit_proposal_key(key: &Key) -> bool { match &key.segments[..] 
{ @@ -282,6 +298,7 @@ pub fn is_parameter_key(key: &Key) -> bool { || is_max_content_size_key(key) || is_max_proposal_code_size_key(key) || is_min_proposal_period_key(key) + || is_max_proposal_period_key(key) || is_min_grace_epoch_key(key) } @@ -318,6 +335,13 @@ pub fn get_min_proposal_period_key() -> Key { .expect("Cannot obtain a storage key") } +/// Get maximum proposal period key +pub fn get_max_proposal_period_key() -> Key { + Key::from(ADDRESS.to_db_key()) + .push(&MAX_PROPOSAL_PERIOD_KEY.to_owned()) + .expect("Cannot obtain a storage key") +} + /// Get maximum proposal content key pub fn get_max_proposal_content_key() -> Key { Key::from(ADDRESS.to_db_key()) diff --git a/shared/src/ledger/governance/utils.rs b/shared/src/ledger/governance/utils.rs index e6377e4fa6..8d4bd44223 100644 --- a/shared/src/ledger/governance/utils.rs +++ b/shared/src/ledger/governance/utils.rs @@ -12,7 +12,7 @@ use crate::ledger::pos::{BondId, Bonds, ValidatorSets, ValidatorTotalDeltas}; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{DBIter, Storage, DB}; use crate::types::address::Address; -use crate::types::governance::{ProposalVote, TallyResult}; +use crate::types::governance::{ProposalVote, TallyResult, VotePower}; use crate::types::storage::{Epoch, Key}; use crate::types::token; @@ -20,11 +20,11 @@ use crate::types::token; /// outcome pub struct Votes { /// Map from validators who votes yay to their total stake amount - pub yay_validators: HashMap, + pub yay_validators: HashMap, /// Map from delegation who votes yay to their bond amount - pub yay_delegators: HashMap, + pub yay_delegators: HashMap>, /// Map from delegation who votes nay to their bond amount - pub nay_delegators: HashMap, + pub nay_delegators: HashMap>, } /// Proposal errors @@ -94,26 +94,30 @@ where nay_delegators, } = votes; - let mut total_yay_stacked_tokens = token::Amount::from(0); + let mut total_yay_stacked_tokens = VotePower::from(0_u64); for (_, amount) in yay_validators.clone().into_iter() { total_yay_stacked_tokens += amount; } // YAY: Add delegator amount whose validator didn't vote / voted nay - for (validator_address, amount) in yay_delegators.into_iter() { - if !yay_validators.contains_key(&validator_address) { - total_yay_stacked_tokens += amount; + for (_, vote_map) in yay_delegators.iter() { + for (validator_address, vote_power) in vote_map.iter() { + if !yay_validators.contains_key(validator_address) { + total_yay_stacked_tokens += vote_power; + } } } // NAY: Remove delegator amount whose validator validator vote yay - for (validator_address, amount) in nay_delegators.into_iter() { - if yay_validators.contains_key(&validator_address) { - total_yay_stacked_tokens -= amount; + for (_, vote_map) in nay_delegators.iter() { + for (validator_address, vote_power) in vote_map.iter() { + if yay_validators.contains_key(validator_address) { + total_yay_stacked_tokens -= vote_power; + } } } - if 3 * total_yay_stacked_tokens >= 2 * total_stacked_tokens { + if total_yay_stacked_tokens >= (total_stacked_tokens / 3) * 2 { TallyResult::Passed } else { TallyResult::Rejected @@ -156,9 +160,21 @@ where (Some(epoched_bonds), Some(slashes)) => { let mut delegated_amount: token::Amount = 0.into(); for bond in epoched_bonds.iter() { + let mut to_deduct = bond.neg_deltas; for (start_epoch, &(mut delta)) in - bond.deltas.iter().sorted() + bond.pos_deltas.iter().sorted() { + // deduct bond's neg_deltas + if to_deduct > delta { + to_deduct -= delta; + // If the whole bond was deducted, continue to + // the next one 
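// [editorial note, not part of the patch] Worked example of this deduction:
// with sorted `pos_deltas` of [60, 50] and `neg_deltas` of 80, the first
// delta is consumed entirely (`to_deduct` drops to 20 and the loop moves on),
// while the second contributes 50 - 20 = 30 towards `delegated_amount`,
// subject to the slashes and the `epoch >= start_epoch` check below.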
+ continue; + } else { + delta -= to_deduct; + to_deduct = token::Amount::default(); + } + let start_epoch = Epoch::from(*start_epoch); delta = apply_slashes(&slashes, delta, start_epoch); if epoch >= start_epoch { @@ -206,9 +222,11 @@ where gov_storage::get_proposal_vote_prefix_key(proposal_id); let (vote_iter, _) = storage.iter_prefix(&vote_prefix_key); - let mut yay_validators: HashMap = HashMap::new(); - let mut yay_delegators: HashMap = HashMap::new(); - let mut nay_delegators: HashMap = HashMap::new(); + let mut yay_validators = HashMap::new(); + let mut yay_delegators: HashMap> = + HashMap::new(); + let mut nay_delegators: HashMap> = + HashMap::new(); for (key, vote_bytes, _) in vote_iter { let vote_key = Key::from_str(key.as_str()).ok(); @@ -217,32 +235,42 @@ where (Some(key), Some(vote)) => { let voter_address = gov_storage::get_voter_address(&key); match voter_address { - Some(address) => { - if vote.is_yay() && validators.contains(address) { - let amount = - get_validator_stake(storage, epoch, address); - yay_validators.insert(address.clone(), amount); - } else if !validators.contains(address) { + Some(voter_address) => { + if vote.is_yay() && validators.contains(voter_address) { + let amount = get_validator_stake( + storage, + epoch, + voter_address, + ); + yay_validators + .insert(voter_address.clone(), amount); + } else if !validators.contains(voter_address) { let validator_address = gov_storage::get_vote_delegation_address(&key); match validator_address { Some(validator_address) => { let amount = get_bond_amount_at( storage, - address, + voter_address, validator_address, epoch, ); if let Some(amount) = amount { if vote.is_yay() { - yay_delegators.insert( - address.clone(), - amount, + let entry = yay_delegators + .entry(voter_address.to_owned()) + .or_default(); + entry.insert( + validator_address.to_owned(), + VotePower::from(amount), ); } else { - nay_delegators.insert( - address.clone(), - amount, + let entry = nay_delegators + .entry(voter_address.to_owned()) + .or_default(); + entry.insert( + validator_address.to_owned(), + VotePower::from(amount), ); } } @@ -301,14 +329,14 @@ fn get_total_stacked_tokens( storage: &Storage, epoch: Epoch, validators: &[Address], -) -> token::Amount +) -> VotePower where D: DB + for<'iter> DBIter<'iter> + Sync + 'static, H: StorageHasher + Sync + 'static, { return validators .iter() - .fold(token::Amount::from(0), |acc, validator| { + .fold(VotePower::from(0_u64), |acc, validator| { acc + get_validator_stake(storage, epoch, validator) }); } @@ -317,7 +345,7 @@ fn get_validator_stake( storage: &Storage, epoch: Epoch, validator: &Address, -) -> token::Amount +) -> VotePower where D: DB + for<'iter> DBIter<'iter> + Sync + 'static, H: StorageHasher + Sync + 'static, @@ -332,9 +360,10 @@ where if let Some(total_delta) = total_delta { let epoched_total_delta = total_delta.get(epoch); if let Some(epoched_total_delta) = epoched_total_delta { - return token::Amount::from_change(epoched_total_delta); + return VotePower::try_from(epoched_total_delta) + .unwrap_or_default(); } } } - token::Amount::from(0) + VotePower::from(0_u64) } diff --git a/shared/src/ledger/governance/vp.rs b/shared/src/ledger/governance/vp.rs index fbd129b43c..aab5171ee9 100644 --- a/shared/src/ledger/governance/vp.rs +++ b/shared/src/ledger/governance/vp.rs @@ -1,6 +1,7 @@ use std::collections::BTreeSet; use borsh::BorshDeserialize; +use native_vp::VpEnv; use thiserror::Error; use super::storage as gov_storage; @@ -88,18 +89,23 @@ where let author = read(ctx, &author_key, 
ReadType::POST).ok(); let has_pre_author = ctx.has_key_pre(&author_key).ok(); match (has_pre_author, author) { - (Some(has_pre_author), Some(author)) => { - // TODO: if author is an implicit address, we should asssume its - // existence we should reuse the same logic as in - // check_address_existence in shared/src/vm/host_env.rs - let address_exist_key = Key::validity_predicate(&author); - let address_exist = ctx.has_key_post(&address_exist_key).ok(); - if let Some(address_exist) = address_exist { - !has_pre_author && verifiers.contains(&author) && address_exist - } else { - false + (Some(has_pre_author), Some(author)) => match author { + Address::Established(_) => { + let address_exist_key = Key::validity_predicate(&author); + let address_exist = ctx.has_key_post(&address_exist_key).ok(); + if let Some(address_exist) = address_exist { + !has_pre_author + && verifiers.contains(&author) + && address_exist + } else { + false + } } - } + Address::Implicit(_) => { + !has_pre_author && verifiers.contains(&author) + } + Address::Internal(_) => false, + }, _ => false, } } @@ -259,12 +265,16 @@ where let min_period_parameter_key = gov_storage::get_min_proposal_period_key(); let min_period: Option = read(ctx, &min_period_parameter_key, ReadType::PRE).ok(); + let max_period_parameter_key = gov_storage::get_max_proposal_period_key(); + let max_period: Option = + read(ctx, &max_period_parameter_key, ReadType::PRE).ok(); let has_pre_start_epoch = ctx.has_key_pre(&start_epoch_key).ok(); let has_pre_end_epoch = ctx.has_key_pre(&end_epoch_key).ok(); match ( has_pre_start_epoch, has_pre_end_epoch, min_period, + max_period, start_epoch, end_epoch, current_epoch, @@ -273,6 +283,7 @@ where Some(has_pre_start_epoch), Some(has_pre_end_epoch), Some(min_period), + Some(max_period), Some(start_epoch), Some(end_epoch), Some(current_epoch), @@ -284,6 +295,7 @@ where && !has_pre_end_epoch && (end_epoch - start_epoch) % min_period == 0 && (end_epoch - start_epoch).0 >= min_period + && (end_epoch - start_epoch).0 <= max_period } _ => false, } @@ -351,7 +363,7 @@ where let max_content_length = read(ctx, &max_content_length_parameter_key, ReadType::PRE).ok(); let has_pre_content = ctx.has_key_pre(&content_key).ok(); - let post_content = ctx.read_post(&content_key).unwrap(); + let post_content = ctx.read_bytes_post(&content_key).unwrap(); match (has_pre_content, post_content, max_content_length) { ( Some(has_pre_content), @@ -378,7 +390,7 @@ where let max_content_length = read(ctx, &max_content_length_parameter_key, ReadType::PRE).ok(); let has_pre_content = ctx.has_key_pre(&content_key).ok(); - let post_content = ctx.read_post(&content_key).unwrap(); + let post_content = ctx.read_bytes_post(&content_key).unwrap(); match (has_pre_content, post_content, max_content_length) { ( Some(has_pre_content), @@ -475,7 +487,7 @@ pub enum ReadType { POST, } -/// Check if a proposal id is beign executed +/// Check if a proposal id is being executed pub fn is_proposal_accepted( context: &Ctx, proposal_id: u64, @@ -505,8 +517,8 @@ where T: Clone + BorshDeserialize, { let storage_result = match read_type { - ReadType::PRE => context.read_pre(key), - ReadType::POST => context.read_post(key), + ReadType::PRE => context.read_bytes_pre(key), + ReadType::POST => context.read_bytes_post(key), }; match storage_result { diff --git a/shared/src/ledger/ibc/handler.rs b/shared/src/ledger/ibc/handler.rs index 93768ec92a..0759077e22 100644 --- a/shared/src/ledger/ibc/handler.rs +++ b/shared/src/ledger/ibc/handler.rs @@ -69,6 +69,7 @@ use 
crate::ibc::events::IbcEvent; use crate::ibc::mock::client_state::{MockClientState, MockConsensusState}; use crate::ibc::timestamp::Timestamp; use crate::ledger::ibc::storage; +use crate::ledger::storage_api; use crate::tendermint::Time; use crate::tendermint_proto::{Error as ProtoError, Protobuf}; use crate::types::address::{Address, InternalAddress}; @@ -116,40 +117,69 @@ pub enum Error { ReceivingToken(String), } +// This is needed to use `ibc::Handler::Error` with `IbcActions` in +// `tx_prelude/src/ibc.rs` +impl From for storage_api::Error { + fn from(err: Error) -> Self { + storage_api::Error::new(err) + } +} + /// for handling IBC modules pub type Result = std::result::Result; /// IBC trait to be implemented in integration that can read and write pub trait IbcActions { + /// IBC action error + type Error: From; + /// Read IBC-related data - fn read_ibc_data(&self, key: &Key) -> Option>; + fn read_ibc_data( + &self, + key: &Key, + ) -> std::result::Result>, Self::Error>; /// Write IBC-related data - fn write_ibc_data(&self, key: &Key, data: impl AsRef<[u8]>); + fn write_ibc_data( + &mut self, + key: &Key, + data: impl AsRef<[u8]>, + ) -> std::result::Result<(), Self::Error>; /// Delete IBC-related data - fn delete_ibc_data(&self, key: &Key); + fn delete_ibc_data( + &mut self, + key: &Key, + ) -> std::result::Result<(), Self::Error>; /// Emit an IBC event - fn emit_ibc_event(&self, event: AnomaIbcEvent); + fn emit_ibc_event( + &mut self, + event: AnomaIbcEvent, + ) -> std::result::Result<(), Self::Error>; /// Transfer token fn transfer_token( - &self, + &mut self, src: &Address, dest: &Address, token: &Address, amount: Amount, - ); + ) -> std::result::Result<(), Self::Error>; /// Get the current height of this chain - fn get_height(&self) -> BlockHeight; + fn get_height(&self) -> std::result::Result; /// Get the current time of the tendermint header of this chain - fn get_header_time(&self) -> Rfc3339String; + fn get_header_time( + &self, + ) -> std::result::Result; /// dispatch according to ICS26 routing - fn dispatch(&self, tx_data: &[u8]) -> Result<()> { + fn dispatch_ibc_action( + &mut self, + tx_data: &[u8], + ) -> std::result::Result<(), Self::Error> { let ibc_msg = IbcMessage::decode(tx_data).map_err(Error::IbcData)?; match &ibc_msg.0 { Ics26Envelope::Ics2Msg(ics02_msg) => match ics02_msg { @@ -199,14 +229,17 @@ pub trait IbcActions { } /// Create a new client - fn create_client(&self, msg: &MsgCreateAnyClient) -> Result<()> { + fn create_client( + &mut self, + msg: &MsgCreateAnyClient, + ) -> std::result::Result<(), Self::Error> { let counter_key = storage::client_counter_key(); let counter = self.get_and_inc_counter(&counter_key)?; let client_type = msg.client_state.client_type(); let client_id = client_id(client_type, counter)?; // client type let client_type_key = storage::client_type_key(&client_id); - self.write_ibc_data(&client_type_key, client_type.as_str().as_bytes()); + self.write_ibc_data(&client_type_key, client_type.as_str().as_bytes())?; // client state let client_state_key = storage::client_state_key(&client_id); self.write_ibc_data( @@ -214,7 +247,7 @@ pub trait IbcActions { msg.client_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; // consensus state let height = msg.client_state.latest_height(); let consensus_state_key = @@ -224,29 +257,33 @@ pub trait IbcActions { msg.consensus_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; self.set_client_update_time(&client_id)?; let event = make_create_client_event(&client_id, msg) 
.try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Update a client - fn update_client(&self, msg: &MsgUpdateAnyClient) -> Result<()> { + fn update_client( + &mut self, + msg: &MsgUpdateAnyClient, + ) -> std::result::Result<(), Self::Error> { // get and update the client let client_id = msg.client_id.clone(); let client_state_key = storage::client_state_key(&client_id); - let value = self.read_ibc_data(&client_state_key).ok_or_else(|| { - Error::Client(format!( - "The client to be updated doesn't exist: ID {}", - client_id - )) - })?; + let value = + self.read_ibc_data(&client_state_key)?.ok_or_else(|| { + Error::Client(format!( + "The client to be updated doesn't exist: ID {}", + client_id + )) + })?; let client_state = AnyClientState::decode_vec(&value).map_err(Error::Decoding)?; let (new_client_state, new_consensus_state) = @@ -258,7 +295,7 @@ pub trait IbcActions { new_client_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; let consensus_state_key = storage::consensus_state_key(&client_id, height); self.write_ibc_data( @@ -266,20 +303,23 @@ pub trait IbcActions { new_consensus_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; self.set_client_update_time(&client_id)?; let event = make_update_client_event(&client_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Upgrade a client - fn upgrade_client(&self, msg: &MsgUpgradeAnyClient) -> Result<()> { + fn upgrade_client( + &mut self, + msg: &MsgUpgradeAnyClient, + ) -> std::result::Result<(), Self::Error> { let client_state_key = storage::client_state_key(&msg.client_id); let height = msg.client_state.latest_height(); let consensus_state_key = @@ -289,26 +329,29 @@ pub trait IbcActions { msg.client_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; self.write_ibc_data( &consensus_state_key, msg.consensus_state .encode_vec() .expect("encoding shouldn't fail"), - ); + )?; self.set_client_update_time(&msg.client_id)?; let event = make_upgrade_client_event(&msg.client_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Initialize a connection for ConnectionOpenInit - fn init_connection(&self, msg: &MsgConnectionOpenInit) -> Result<()> { + fn init_connection( + &mut self, + msg: &MsgConnectionOpenInit, + ) -> std::result::Result<(), Self::Error> { let counter_key = storage::connection_counter_key(); let counter = self.get_and_inc_counter(&counter_key)?; // new connection @@ -318,18 +361,21 @@ pub trait IbcActions { self.write_ibc_data( &conn_key, connection.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_init_connection_event(&conn_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Initialize a connection for ConnectionOpenTry - fn try_connection(&self, msg: &MsgConnectionOpenTry) -> Result<()> { + fn try_connection( + &mut self, + msg: &MsgConnectionOpenTry, + ) -> std::result::Result<(), Self::Error> { let counter_key = storage::connection_counter_key(); let counter = self.get_and_inc_counter(&counter_key)?; // new connection @@ -339,20 +385,23 @@ pub trait IbcActions { self.write_ibc_data( &conn_key, connection.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_try_connection_event(&conn_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Open the connection for 
ConnectionOpenAck - fn ack_connection(&self, msg: &MsgConnectionOpenAck) -> Result<()> { + fn ack_connection( + &mut self, + msg: &MsgConnectionOpenAck, + ) -> std::result::Result<(), Self::Error> { let conn_key = storage::connection_key(&msg.connection_id); - let value = self.read_ibc_data(&conn_key).ok_or_else(|| { + let value = self.read_ibc_data(&conn_key)?.ok_or_else(|| { Error::Connection(format!( "The connection to be opened doesn't exist: ID {}", msg.connection_id @@ -368,18 +417,21 @@ pub trait IbcActions { self.write_ibc_data( &conn_key, connection.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_ack_connection_event(msg).try_into().unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Open the connection for ConnectionOpenConfirm - fn confirm_connection(&self, msg: &MsgConnectionOpenConfirm) -> Result<()> { + fn confirm_connection( + &mut self, + msg: &MsgConnectionOpenConfirm, + ) -> std::result::Result<(), Self::Error> { let conn_key = storage::connection_key(&msg.connection_id); - let value = self.read_ibc_data(&conn_key).ok_or_else(|| { + let value = self.read_ibc_data(&conn_key)?.ok_or_else(|| { Error::Connection(format!( "The connection to be opend doesn't exist: ID {}", msg.connection_id @@ -391,16 +443,19 @@ pub trait IbcActions { self.write_ibc_data( &conn_key, connection.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_confirm_connection_event(msg).try_into().unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Initialize a channel for ChannelOpenInit - fn init_channel(&self, msg: &MsgChannelOpenInit) -> Result<()> { + fn init_channel( + &mut self, + msg: &MsgChannelOpenInit, + ) -> std::result::Result<(), Self::Error> { self.bind_port(&msg.port_id)?; let counter_key = storage::channel_counter_key(); let counter = self.get_and_inc_counter(&counter_key)?; @@ -410,18 +465,21 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, msg.channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_init_channel_event(&channel_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Initialize a channel for ChannelOpenTry - fn try_channel(&self, msg: &MsgChannelOpenTry) -> Result<()> { + fn try_channel( + &mut self, + msg: &MsgChannelOpenTry, + ) -> std::result::Result<(), Self::Error> { self.bind_port(&msg.port_id)?; let counter_key = storage::channel_counter_key(); let counter = self.get_and_inc_counter(&counter_key)?; @@ -431,22 +489,25 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, msg.channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_try_channel_event(&channel_id, msg) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Open the channel for ChannelOpenAck - fn ack_channel(&self, msg: &MsgChannelOpenAck) -> Result<()> { + fn ack_channel( + &mut self, + msg: &MsgChannelOpenAck, + ) -> std::result::Result<(), Self::Error> { let port_channel_id = port_channel_id(msg.port_id.clone(), msg.channel_id); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be opened doesn't exist: Port/Channel {}", port_channel_id @@ -459,22 +520,25 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, 
channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_ack_channel_event(msg, &channel)? .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Open the channel for ChannelOpenConfirm - fn confirm_channel(&self, msg: &MsgChannelOpenConfirm) -> Result<()> { + fn confirm_channel( + &mut self, + msg: &MsgChannelOpenConfirm, + ) -> std::result::Result<(), Self::Error> { let port_channel_id = port_channel_id(msg.port_id.clone(), msg.channel_id); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be opened doesn't exist: Port/Channel {}", port_channel_id @@ -486,22 +550,25 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_open_confirm_channel_event(msg, &channel)? .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Close the channel for ChannelCloseInit - fn close_init_channel(&self, msg: &MsgChannelCloseInit) -> Result<()> { + fn close_init_channel( + &mut self, + msg: &MsgChannelCloseInit, + ) -> std::result::Result<(), Self::Error> { let port_channel_id = port_channel_id(msg.port_id.clone(), msg.channel_id); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be closed doesn't exist: Port/Channel {}", port_channel_id @@ -513,25 +580,25 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_close_init_channel_event(msg, &channel)? .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Close the channel for ChannelCloseConfirm fn close_confirm_channel( - &self, + &mut self, msg: &MsgChannelCloseConfirm, - ) -> Result<()> { + ) -> std::result::Result<(), Self::Error> { let port_channel_id = port_channel_id(msg.port_id.clone(), msg.channel_id); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be closed doesn't exist: Port/Channel {}", port_channel_id @@ -543,24 +610,24 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; let event = make_close_confirm_channel_event(msg, &channel)? .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Send a packet fn send_packet( - &self, + &mut self, port_channel_id: PortChannelId, data: Vec, timeout_height: Height, timeout_timestamp: Timestamp, - ) -> Result<()> { + ) -> std::result::Result<(), Self::Error> { // get and increment the next sequence send let seq_key = storage::next_sequence_send_key(&port_channel_id); let sequence = self.get_and_inc_sequence(&seq_key)?; @@ -568,7 +635,7 @@ pub trait IbcActions { // get the channel for the destination info. let channel_key = storage::channel_key(&port_channel_id); let channel = self - .read_ibc_data(&channel_key) + .read_ibc_data(&channel_key)? 
.expect("cannot get the channel to be closed"); let channel = ChannelEnd::decode_vec(&channel).expect("cannot get the channel"); @@ -594,16 +661,19 @@ pub trait IbcActions { packet.sequence, ); let commitment = commitment(&packet); - self.write_ibc_data(&commitment_key, commitment.into_vec()); + self.write_ibc_data(&commitment_key, commitment.into_vec())?; let event = make_send_packet_event(packet).try_into().unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Receive a packet - fn receive_packet(&self, msg: &MsgRecvPacket) -> Result<()> { + fn receive_packet( + &mut self, + msg: &MsgRecvPacket, + ) -> std::result::Result<(), Self::Error> { // check the packet data if let Ok(data) = serde_json::from_slice(&msg.packet.data) { self.receive_token(&msg.packet, &data)?; @@ -615,7 +685,7 @@ pub trait IbcActions { &msg.packet.destination_channel, msg.packet.sequence, ); - self.write_ibc_data(&receipt_key, PacketReceipt::default().as_bytes()); + self.write_ibc_data(&receipt_key, PacketReceipt::default().as_bytes())?; // store the ack let ack_key = storage::ack_key( @@ -625,7 +695,7 @@ pub trait IbcActions { ); let ack = PacketAck::default().encode_to_vec(); let ack_commitment = sha2::Sha256::digest(&ack).to_vec(); - self.write_ibc_data(&ack_key, ack_commitment); + self.write_ibc_data(&ack_key, ack_commitment)?; // increment the next sequence receive let port_channel_id = port_channel_id( @@ -638,28 +708,34 @@ pub trait IbcActions { let event = make_write_ack_event(msg.packet.clone(), ack) .try_into() .unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Receive a acknowledgement - fn acknowledge_packet(&self, msg: &MsgAcknowledgement) -> Result<()> { + fn acknowledge_packet( + &mut self, + msg: &MsgAcknowledgement, + ) -> std::result::Result<(), Self::Error> { let commitment_key = storage::commitment_key( &msg.packet.source_port, &msg.packet.source_channel, msg.packet.sequence, ); - self.delete_ibc_data(&commitment_key); + self.delete_ibc_data(&commitment_key)?; let event = make_ack_event(msg.packet.clone()).try_into().unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Receive a timeout - fn timeout_packet(&self, msg: &MsgTimeout) -> Result<()> { + fn timeout_packet( + &mut self, + msg: &MsgTimeout, + ) -> std::result::Result<(), Self::Error> { // check the packet data if let Ok(data) = serde_json::from_slice(&msg.packet.data) { self.refund_token(&msg.packet, &data)?; @@ -671,7 +747,7 @@ pub trait IbcActions { &msg.packet.source_channel, msg.packet.sequence, ); - self.delete_ibc_data(&commitment_key); + self.delete_ibc_data(&commitment_key)?; // close the channel let port_channel_id = port_channel_id( @@ -679,7 +755,7 @@ pub trait IbcActions { msg.packet.source_channel, ); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be closed doesn't exist: Port/Channel {}", port_channel_id @@ -692,17 +768,20 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; } let event = make_timeout_event(msg.packet.clone()).try_into().unwrap(); - self.emit_ibc_event(event); + self.emit_ibc_event(event)?; Ok(()) } /// Receive a timeout for TimeoutOnClose - fn timeout_on_close_packet(&self, msg: &MsgTimeoutOnClose) -> Result<()> { + fn timeout_on_close_packet( + &mut self, + msg: 
&MsgTimeoutOnClose, + ) -> std::result::Result<(), Self::Error> { // check the packet data if let Ok(data) = serde_json::from_slice(&msg.packet.data) { self.refund_token(&msg.packet, &data)?; @@ -714,7 +793,7 @@ pub trait IbcActions { &msg.packet.source_channel, msg.packet.sequence, ); - self.delete_ibc_data(&commitment_key); + self.delete_ibc_data(&commitment_key)?; // close the channel let port_channel_id = port_channel_id( @@ -722,7 +801,7 @@ pub trait IbcActions { msg.packet.source_channel, ); let channel_key = storage::channel_key(&port_channel_id); - let value = self.read_ibc_data(&channel_key).ok_or_else(|| { + let value = self.read_ibc_data(&channel_key)?.ok_or_else(|| { Error::Channel(format!( "The channel to be closed doesn't exist: Port/Channel {}", port_channel_id @@ -735,15 +814,18 @@ pub trait IbcActions { self.write_ibc_data( &channel_key, channel.encode_vec().expect("encoding shouldn't fail"), - ); + )?; } Ok(()) } /// Set the timestamp and the height for the client update - fn set_client_update_time(&self, client_id: &ClientId) -> Result<()> { - let time = Time::parse_from_rfc3339(&self.get_header_time().0) + fn set_client_update_time( + &mut self, + client_id: &ClientId, + ) -> std::result::Result<(), Self::Error> { + let time = Time::parse_from_rfc3339(&self.get_header_time()?.0) .map_err(|e| { Error::Time(format!("The time of the header is invalid: {}", e)) })?; @@ -751,36 +833,42 @@ pub trait IbcActions { self.write_ibc_data( &key, time.encode_vec().expect("encoding shouldn't fail"), - ); + )?; // the revision number is always 0 - let height = Height::new(0, self.get_height().0); + let height = Height::new(0, self.get_height()?.0); let height_key = storage::client_update_height_key(client_id); // write the current height as u64 self.write_ibc_data( &height_key, height.encode_vec().expect("Encoding shouldn't fail"), - ); + )?; Ok(()) } /// Get and increment the counter - fn get_and_inc_counter(&self, key: &Key) -> Result { - let value = self.read_ibc_data(key).ok_or_else(|| { + fn get_and_inc_counter( + &mut self, + key: &Key, + ) -> std::result::Result { + let value = self.read_ibc_data(key)?.ok_or_else(|| { Error::Counter(format!("The counter doesn't exist: {}", key)) })?; let value: [u8; 8] = value.try_into().map_err(|_| { Error::Counter(format!("The counter value wasn't u64: Key {}", key)) })?; let counter = u64::from_be_bytes(value); - self.write_ibc_data(key, (counter + 1).to_be_bytes()); + self.write_ibc_data(key, (counter + 1).to_be_bytes())?; Ok(counter) } /// Get and increment the sequence - fn get_and_inc_sequence(&self, key: &Key) -> Result { - let index = match self.read_ibc_data(key) { + fn get_and_inc_sequence( + &mut self, + key: &Key, + ) -> std::result::Result { + let index = match self.read_ibc_data(key)? { Some(v) => { let index: [u8; 8] = v.try_into().map_err(|_| { Error::Sequence(format!( @@ -793,29 +881,35 @@ pub trait IbcActions { // when the sequence has never been used, returns the initial value None => 1, }; - self.write_ibc_data(key, (index + 1).to_be_bytes()); + self.write_ibc_data(key, (index + 1).to_be_bytes())?; Ok(index.into()) } /// Bind a new port - fn bind_port(&self, port_id: &PortId) -> Result<()> { + fn bind_port( + &mut self, + port_id: &PortId, + ) -> std::result::Result<(), Self::Error> { let port_key = storage::port_key(port_id); - match self.read_ibc_data(&port_key) { + match self.read_ibc_data(&port_key)? 
{ Some(_) => {} None => { // create a new capability and claim it let index_key = storage::capability_index_key(); let cap_index = self.get_and_inc_counter(&index_key)?; - self.write_ibc_data(&port_key, cap_index.to_be_bytes()); + self.write_ibc_data(&port_key, cap_index.to_be_bytes())?; let cap_key = storage::capability_key(cap_index); - self.write_ibc_data(&cap_key, port_id.as_bytes()); + self.write_ibc_data(&cap_key, port_id.as_bytes())?; } } Ok(()) } /// Send the specified token by escrowing or burning - fn send_token(&self, msg: &MsgTransfer) -> Result<()> { + fn send_token( + &mut self, + msg: &MsgTransfer, + ) -> std::result::Result<(), Self::Error> { let data = FungibleTokenPacketData::from(msg.clone()); let source = Address::decode(data.sender.clone()).map_err(|e| { Error::SendingToken(format!( @@ -852,7 +946,7 @@ pub trait IbcActions { if data.denomination.starts_with(&prefix) { // sink zone let burn = Address::Internal(InternalAddress::IbcBurn); - self.transfer_token(&source, &burn, &token, amount); + self.transfer_token(&source, &burn, &token, amount)?; } else { // source zone let escrow = @@ -860,7 +954,7 @@ pub trait IbcActions { msg.source_port.to_string(), msg.source_channel.to_string(), )); - self.transfer_token(&source, &escrow, &token, amount); + self.transfer_token(&source, &escrow, &token, amount)?; } // send a packet @@ -878,10 +972,10 @@ pub trait IbcActions { /// Receive the specified token by unescrowing or minting fn receive_token( - &self, + &mut self, packet: &Packet, data: &FungibleTokenPacketData, - ) -> Result<()> { + ) -> std::result::Result<(), Self::Error> { let dest = Address::decode(data.receiver.clone()).map_err(|e| { Error::ReceivingToken(format!( "Invalid receiver address: receiver {}, error {}", @@ -920,21 +1014,21 @@ pub trait IbcActions { packet.destination_port.to_string(), packet.destination_channel.to_string(), )); - self.transfer_token(&escrow, &dest, &token, amount); + self.transfer_token(&escrow, &dest, &token, amount)?; } else { // mint the token because the sender chain is the source let mint = Address::Internal(InternalAddress::IbcMint); - self.transfer_token(&mint, &dest, &token, amount); + self.transfer_token(&mint, &dest, &token, amount)?; } Ok(()) } /// Refund the specified token by unescrowing or minting fn refund_token( - &self, + &mut self, packet: &Packet, data: &FungibleTokenPacketData, - ) -> Result<()> { + ) -> std::result::Result<(), Self::Error> { let dest = Address::decode(data.sender.clone()).map_err(|e| { Error::ReceivingToken(format!( "Invalid sender address: sender {}, error {}", @@ -969,7 +1063,7 @@ pub trait IbcActions { if data.denomination.starts_with(&prefix) { // mint the token because the sender chain is the sink zone let mint = Address::Internal(InternalAddress::IbcMint); - self.transfer_token(&mint, &dest, &token, amount); + self.transfer_token(&mint, &dest, &token, amount)?; } else { // unescrow the token because the sender chain is the source zone let escrow = @@ -977,7 +1071,7 @@ pub trait IbcActions { packet.source_port.to_string(), packet.source_channel.to_string(), )); - self.transfer_token(&escrow, &dest, &token, amount); + self.transfer_token(&escrow, &dest, &token, amount)?; } Ok(()) } @@ -1109,17 +1203,17 @@ pub fn packet_from_message( /// Returns a commitment from the given packet pub fn commitment(packet: &Packet) -> PacketCommitment { - let input = packet - .timeout_timestamp - .nanoseconds() - .to_be_bytes() - .to_vec(); + let timeout = packet.timeout_timestamp.nanoseconds().to_be_bytes(); let 
revision_number = packet.timeout_height.revision_number.to_be_bytes(); - let input = [input.as_slice(), revision_number.as_slice()].concat(); let revision_height = packet.timeout_height.revision_height.to_be_bytes(); - let input = [input.as_slice(), revision_height.as_slice()].concat(); let data = sha2::Sha256::digest(&packet.data); - let input = [input.as_slice(), data.as_slice()].concat(); + let input = [ + &timeout, + &revision_number, + &revision_height, + data.as_slice(), + ] + .concat(); sha2::Sha256::digest(&input).to_vec().into() } diff --git a/shared/src/ledger/ibc/vp/channel.rs b/shared/src/ledger/ibc/vp/channel.rs index a0dafebc00..fece1360f4 100644 --- a/shared/src/ledger/ibc/vp/channel.rs +++ b/shared/src/ledger/ibc/vp/channel.rs @@ -51,7 +51,7 @@ use crate::ibc::core::ics26_routing::context::ModuleId; use crate::ibc::core::ics26_routing::msgs::Ics26Envelope; use crate::ibc::proofs::Proofs; use crate::ibc::timestamp::Timestamp; -use crate::ledger::native_vp::Error as NativeVpError; +use crate::ledger::native_vp::{Error as NativeVpError, VpEnv}; use crate::ledger::parameters; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self as ledger_storage}; @@ -533,7 +533,7 @@ where } fn get_sequence_pre(&self, key: &Key) -> Result { - match self.ctx.read_pre(key)? { + match self.ctx.read_bytes_pre(key)? { Some(value) => { // As ibc-go, u64 like a counter is encoded with big-endian let index: [u8; 8] = value.try_into().map_err(|_| { @@ -551,7 +551,7 @@ where } fn get_sequence(&self, key: &Key) -> Result { - match self.ctx.read_post(key)? { + match self.ctx.read_bytes_post(key)? { Some(value) => { // As ibc-go, u64 like a counter is encoded with big-endian let index: [u8; 8] = value.try_into().map_err(|_| { @@ -590,7 +590,7 @@ where port_channel_id: &PortChannelId, ) -> Result { let key = channel_key(port_channel_id); - match self.ctx.read_pre(&key) { + match self.ctx.read_bytes_pre(&key) { Ok(Some(value)) => ChannelEnd::decode_vec(&value).map_err(|e| { Error::InvalidChannel(format!( "Decoding the channel failed: Port/Channel {}, {}", @@ -637,7 +637,7 @@ where key: &(PortId, ChannelId, Sequence), ) -> Result { let key = commitment_key(&key.0, &key.1, key.2); - match self.ctx.read_pre(&key)? { + match self.ctx.read_bytes_pre(&key)? { Some(value) => Ok(value.into()), None => Err(Error::InvalidPacketInfo(format!( "The prior commitment doesn't exist: Key {}", @@ -651,7 +651,7 @@ where client_id: &ClientId, ) -> Result { let key = client_update_timestamp_key(client_id); - match self.ctx.read_pre(&key)? { + match self.ctx.read_bytes_pre(&key)? { Some(value) => { let time = Time::decode_vec(&value).map_err(|_| { Error::InvalidTimestamp(format!( @@ -673,7 +673,7 @@ where client_id: &ClientId, ) -> Result { let key = client_update_height_key(client_id); - match self.ctx.read_pre(&key)? { + match self.ctx.read_bytes_pre(&key)? 
{ Some(value) => Height::decode_vec(&value).map_err(|_| { Error::InvalidHeight(format!( "Height conversion failed: ID {}", @@ -709,7 +709,7 @@ where channel_id: port_channel_id.1, }; let key = channel_key(&port_channel_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => ChannelEnd::decode_vec(&value) .map_err(|_| Ics04Error::implementation_specific()), Ok(None) => Err(Ics04Error::channel_not_found( @@ -857,7 +857,7 @@ where key: &(PortId, ChannelId, Sequence), ) -> Ics04Result { let commitment_key = commitment_key(&key.0, &key.1, key.2); - match self.ctx.read_post(&commitment_key) { + match self.ctx.read_bytes_post(&commitment_key) { Ok(Some(value)) => Ok(value.into()), Ok(None) => Err(Ics04Error::packet_commitment_not_found(key.2)), Err(_) => Err(Ics04Error::implementation_specific()), @@ -870,7 +870,7 @@ where ) -> Ics04Result { let receipt_key = receipt_key(&key.0, &key.1, key.2); let expect = PacketReceipt::default().as_bytes().to_vec(); - match self.ctx.read_post(&receipt_key) { + match self.ctx.read_bytes_post(&receipt_key) { Ok(Some(v)) if v == expect => Ok(Receipt::Ok), _ => Err(Ics04Error::packet_receipt_not_found(key.2)), } @@ -881,7 +881,7 @@ where key: &(PortId, ChannelId, Sequence), ) -> Ics04Result { let ack_key = ack_key(&key.0, &key.1, key.2); - match self.ctx.read_post(&ack_key) { + match self.ctx.read_bytes_post(&ack_key) { Ok(Some(value)) => Ok(value.into()), Ok(None) => Err(Ics04Error::packet_commitment_not_found(key.2)), Err(_) => Err(Ics04Error::implementation_specific()), @@ -917,7 +917,7 @@ where height: Height, ) -> Ics04Result { let key = client_update_timestamp_key(client_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => { let time = Time::decode_vec(&value) .map_err(|_| Ics04Error::implementation_specific())?; @@ -937,7 +937,7 @@ where height: Height, ) -> Ics04Result { let key = client_update_height_key(client_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => Height::decode_vec(&value) .map_err(|_| Ics04Error::implementation_specific()), Ok(None) => Err(Ics04Error::processed_height_not_found( diff --git a/shared/src/ledger/ibc/vp/client.rs b/shared/src/ledger/ibc/vp/client.rs index 8b1fa8b73d..45d66443bc 100644 --- a/shared/src/ledger/ibc/vp/client.rs +++ b/shared/src/ledger/ibc/vp/client.rs @@ -31,6 +31,7 @@ use crate::ibc::core::ics04_channel::context::ChannelReader; use crate::ibc::core::ics23_commitment::commitment::CommitmentRoot; use crate::ibc::core::ics24_host::identifier::ClientId; use crate::ibc::core::ics26_routing::msgs::Ics26Envelope; +use crate::ledger::native_vp::VpEnv; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self}; use crate::tendermint_proto::Protobuf; @@ -379,7 +380,7 @@ where fn client_state_pre(&self, client_id: &ClientId) -> Result { let key = client_state_key(client_id); - match self.ctx.read_pre(&key) { + match self.ctx.read_bytes_pre(&key) { Ok(Some(value)) => { AnyClientState::decode_vec(&value).map_err(|e| { Error::InvalidClient(format!( @@ -411,7 +412,7 @@ where { fn client_type(&self, client_id: &ClientId) -> Ics02Result { let key = client_type_key(client_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => { let type_str = std::str::from_utf8(&value) .map_err(|_| Ics02Error::implementation_specific())?; @@ -428,7 +429,7 @@ where client_id: &ClientId, ) -> Ics02Result { let key = 
client_state_key(client_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => AnyClientState::decode_vec(&value) .map_err(|_| Ics02Error::implementation_specific()), Ok(None) => Err(Ics02Error::client_not_found(client_id.clone())), @@ -442,7 +443,7 @@ where height: Height, ) -> Ics02Result { let key = consensus_state_key(client_id, height); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => AnyConsensusState::decode_vec(&value) .map_err(|_| Ics02Error::implementation_specific()), Ok(None) => Err(Ics02Error::consensus_state_not_found( @@ -460,7 +461,7 @@ where height: Height, ) -> Ics02Result> { let key = consensus_state_key(client_id, height); - match self.ctx.read_pre(&key) { + match self.ctx.read_bytes_pre(&key) { Ok(Some(value)) => { let cs = AnyConsensusState::decode_vec(&value) .map_err(|_| Ics02Error::implementation_specific())?; diff --git a/shared/src/ledger/ibc/vp/connection.rs b/shared/src/ledger/ibc/vp/connection.rs index 4037d1b02a..69a87eb128 100644 --- a/shared/src/ledger/ibc/vp/connection.rs +++ b/shared/src/ledger/ibc/vp/connection.rs @@ -27,6 +27,7 @@ use crate::ibc::core::ics03_connection::msgs::conn_open_confirm::MsgConnectionOp use crate::ibc::core::ics03_connection::msgs::conn_open_try::MsgConnectionOpenTry; use crate::ibc::core::ics23_commitment::commitment::CommitmentPrefix; use crate::ibc::core::ics24_host::identifier::{ClientId, ConnectionId}; +use crate::ledger::native_vp::VpEnv; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self}; use crate::tendermint_proto::Protobuf; @@ -325,7 +326,7 @@ where conn_id: &ConnectionId, ) -> Result { let key = connection_key(conn_id); - match self.ctx.read_pre(&key) { + match self.ctx.read_bytes_pre(&key) { Ok(Some(value)) => ConnectionEnd::decode_vec(&value).map_err(|e| { Error::InvalidConnection(format!( "Decoding the connection failed: {}", @@ -357,7 +358,7 @@ where conn_id: &ConnectionId, ) -> Ics03Result { let key = connection_key(conn_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => ConnectionEnd::decode_vec(&value) .map_err(|_| Ics03Error::implementation_specific()), Ok(None) => Err(Ics03Error::connection_not_found(conn_id.clone())), diff --git a/shared/src/ledger/ibc/vp/mod.rs b/shared/src/ledger/ibc/vp/mod.rs index 6797a61054..b6ffd32cba 100644 --- a/shared/src/ledger/ibc/vp/mod.rs +++ b/shared/src/ledger/ibc/vp/mod.rs @@ -17,7 +17,7 @@ pub use token::{Error as IbcTokenError, IbcToken}; use super::storage::{client_id, ibc_prefix, is_client_counter_key, IbcPrefix}; use crate::ibc::core::ics02_client::context::ClientReader; use crate::ibc::events::IbcEvent; -use crate::ledger::native_vp::{self, Ctx, NativeVp}; +use crate::ledger::native_vp::{self, Ctx, NativeVp, VpEnv}; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self as ledger_storage}; use crate::proto::SignedTxData; @@ -185,7 +185,7 @@ where } fn read_counter_pre(&self, key: &Key) -> Result { - match self.ctx.read_pre(key) { + match self.ctx.read_bytes_pre(key) { Ok(Some(value)) => { // As ibc-go, u64 like a counter is encoded with big-endian let counter: [u8; 8] = value.try_into().map_err(|_| { @@ -206,7 +206,7 @@ where } fn read_counter(&self, key: &Key) -> Result { - match self.ctx.read_post(key) { + match self.ctx.read_bytes_post(key) { Ok(Some(value)) => { // As ibc-go, u64 like a counter is encoded with big-endian let counter: [u8; 8] = 
value.try_into().map_err(|_| { @@ -375,6 +375,8 @@ mod tests { use crate::vm::wasm; use crate::types::storage::{BlockHash, BlockHeight}; + const ADDRESS: Address = Address::Internal(InternalAddress::Ibc); + fn get_client_id() -> ClientId { ClientId::from_str("test_client").expect("Creating a client ID failed") } @@ -558,13 +560,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); - let mut keys_changed = BTreeSet::new(); let client_state_key = client_state_key(&get_client_id()); keys_changed.insert(client_state_key); let verifiers = BTreeSet::new(); + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should return true because state has been stored @@ -588,13 +598,22 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); let client_state_key = client_state_key(&get_client_id()); keys_changed.insert(client_state_key); let verifiers = BTreeSet::new(); + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should fail because no state is stored @@ -658,13 +677,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(client_state_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should return true because state has been stored assert!( @@ -707,13 +734,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(conn_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should return true because state has been stored assert!( @@ -753,13 +788,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(conn_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should fail because no client exists let result = ibc @@ -825,13 +868,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(conn_key); let verifiers = 
BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; // this should return true because state has been stored assert!( @@ -903,13 +954,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(conn_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -968,13 +1027,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(conn_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1019,13 +1086,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(channel_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1089,13 +1164,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(channel_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1167,13 +1250,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(channel_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1242,13 +1333,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(channel_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1273,13 +1372,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let 
ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(port_key(&get_port_id())); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1305,13 +1412,22 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); let cap_key = capability_key(index); keys_changed.insert(cap_key); let verifiers = BTreeSet::new(); + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( @@ -1378,13 +1494,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(seq_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1457,13 +1581,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(seq_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1541,13 +1673,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(seq_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1617,13 +1757,21 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); let mut keys_changed = BTreeSet::new(); keys_changed.insert(commitment_key); let verifiers = BTreeSet::new(); - + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( ibc.validate_tx( @@ -1701,12 +1849,20 @@ mod tests { let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); - let mut keys_changed = BTreeSet::new(); keys_changed.insert(receipt_key); let verifiers = BTreeSet::new(); + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( @@ -1741,12 +1897,20 @@ mod tests { 
let gas_meter = VpGasMeter::new(0); let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); - let ctx = Ctx::new(&storage, &write_log, &tx, gas_meter, vp_wasm_cache); - let mut keys_changed = BTreeSet::new(); keys_changed.insert(ack_key); let verifiers = BTreeSet::new(); + let ctx = Ctx::new( + &ADDRESS, + &storage, + &write_log, + &tx, + gas_meter, + &keys_changed, + &verifiers, + vp_wasm_cache, + ); let ibc = Ibc { ctx }; assert!( diff --git a/shared/src/ledger/ibc/vp/port.rs b/shared/src/ledger/ibc/vp/port.rs index 5efade84bd..9b91aac957 100644 --- a/shared/src/ledger/ibc/vp/port.rs +++ b/shared/src/ledger/ibc/vp/port.rs @@ -16,6 +16,7 @@ use crate::ibc::core::ics05_port::context::{CapabilityReader, PortReader}; use crate::ibc::core::ics05_port::error::Error as Ics05Error; use crate::ibc::core::ics24_host::identifier::PortId; use crate::ibc::core::ics26_routing::context::ModuleId; +use crate::ledger::native_vp::VpEnv; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self as ledger_storage}; use crate::types::storage::Key; @@ -127,7 +128,7 @@ where fn get_port_by_capability(&self, cap: &Capability) -> Result { let key = capability_key(cap.index()); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => { let id = std::str::from_utf8(&value).map_err(|e| { Error::InvalidPort(format!( @@ -164,7 +165,7 @@ where port_id: &PortId, ) -> Ics05Result<(ModuleId, PortCapability)> { let key = port_key(port_id); - match self.ctx.read_post(&key) { + match self.ctx.read_bytes_post(&key) { Ok(Some(value)) => { let index: [u8; 8] = value .try_into() diff --git a/shared/src/ledger/ibc/vp/token.rs b/shared/src/ledger/ibc/vp/token.rs index 806e26711f..13823efcf8 100644 --- a/shared/src/ledger/ibc/vp/token.rs +++ b/shared/src/ledger/ibc/vp/token.rs @@ -10,7 +10,7 @@ use crate::ibc::applications::ics20_fungible_token_transfer::msgs::transfer::Msg use crate::ibc::core::ics04_channel::msgs::PacketMsg; use crate::ibc::core::ics04_channel::packet::Packet; use crate::ibc::core::ics26_routing::msgs::Ics26Envelope; -use crate::ledger::native_vp::{self, Ctx, NativeVp}; +use crate::ledger::native_vp::{self, Ctx, NativeVp, VpEnv}; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self as ledger_storage}; use crate::proto::SignedTxData; @@ -137,9 +137,10 @@ where // sink zone let target = Address::Internal(InternalAddress::IbcBurn); let target_key = token::balance_key(&token, &target); - let post = - try_decode_token_amount(self.ctx.read_temp(&target_key)?)? - .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_temp(&target_key)?, + )? + .unwrap_or_default(); // the previous balance of the burn address should be zero post.change() } else { @@ -150,11 +151,13 @@ where msg.source_channel.to_string(), )); let target_key = token::balance_key(&token, &target); - let pre = try_decode_token_amount(self.ctx.read_pre(&target_key)?)? - .unwrap_or_default(); - let post = - try_decode_token_amount(self.ctx.read_post(&target_key)?)? + let pre = + try_decode_token_amount(self.ctx.read_bytes_pre(&target_key)?)? .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_post(&target_key)?, + )? + .unwrap_or_default(); post.change() - pre.change() }; @@ -190,19 +193,22 @@ where packet.destination_channel.to_string(), )); let source_key = token::balance_key(&token, &source); - let pre = try_decode_token_amount(self.ctx.read_pre(&source_key)?)? 
- .unwrap_or_default(); - let post = - try_decode_token_amount(self.ctx.read_post(&source_key)?)? + let pre = + try_decode_token_amount(self.ctx.read_bytes_pre(&source_key)?)? .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_post(&source_key)?, + )? + .unwrap_or_default(); pre.change() - post.change() } else { // the sender is the source let source = Address::Internal(InternalAddress::IbcMint); let source_key = token::balance_key(&token, &source); - let post = - try_decode_token_amount(self.ctx.read_temp(&source_key)?)? - .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_temp(&source_key)?, + )? + .unwrap_or_default(); // the previous balance of the mint address should be the maximum Amount::max().change() - post.change() }; @@ -236,9 +242,10 @@ where // sink zone: mint the token for the refund let source = Address::Internal(InternalAddress::IbcMint); let source_key = token::balance_key(&token, &source); - let post = - try_decode_token_amount(self.ctx.read_temp(&source_key)?)? - .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_temp(&source_key)?, + )? + .unwrap_or_default(); // the previous balance of the mint address should be the maximum Amount::max().change() - post.change() } else { @@ -249,11 +256,13 @@ where packet.source_channel.to_string(), )); let source_key = token::balance_key(&token, &source); - let pre = try_decode_token_amount(self.ctx.read_pre(&source_key)?)? - .unwrap_or_default(); - let post = - try_decode_token_amount(self.ctx.read_post(&source_key)?)? + let pre = + try_decode_token_amount(self.ctx.read_bytes_pre(&source_key)?)? .unwrap_or_default(); + let post = try_decode_token_amount( + self.ctx.read_bytes_post(&source_key)?, + )? + .unwrap_or_default(); pre.change() - post.change() }; diff --git a/shared/src/ledger/mod.rs b/shared/src/ledger/mod.rs index 2d545f96f2..ef92b1e2d9 100644 --- a/shared/src/ledger/mod.rs +++ b/shared/src/ledger/mod.rs @@ -7,6 +7,8 @@ pub mod ibc; pub mod native_vp; pub mod parameters; pub mod pos; +pub mod slash_fund; pub mod storage; -pub mod treasury; +pub mod storage_api; +pub mod tx_env; pub mod vp_env; diff --git a/shared/src/ledger/native_vp.rs b/shared/src/ledger/native_vp.rs index 594db7f8d1..6e3d8ad2af 100644 --- a/shared/src/ledger/native_vp.rs +++ b/shared/src/ledger/native_vp.rs @@ -5,8 +5,9 @@ use std::collections::BTreeSet; use borsh::BorshDeserialize; use eyre::Context; -use thiserror::Error; +use super::storage_api::{self, ResultExt, StorageRead}; +pub use super::vp_env::VpEnv; use crate::ledger::gas::VpGasMeter; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::write_log::WriteLog; @@ -14,19 +15,15 @@ use crate::ledger::storage::Storage; use crate::ledger::{storage, vp_env}; use crate::proto::Tx; use crate::types::address::{Address, InternalAddress}; +use crate::types::hash::Hash; use crate::types::storage::{BlockHash, BlockHeight, Epoch, Key}; use crate::vm::prefix_iter::PrefixIterators; use crate::vm::WasmCacheAccess; -#[allow(missing_docs)] -#[derive(Error, Debug)] -pub enum Error { - #[error("Host context error: {0}")] - ContextError(vp_env::RuntimeError), -} - -/// Native VP function result -pub type Result = std::result::Result; +/// Possible error in a native VP host function call +/// The `storage_api::Error` may wrap the `vp_env::RuntimeError` and can +/// be extended with other custom errors when using `trait VpEnv`. 
+pub type Error = storage_api::Error; /// A native VP module should implement its validation logic using this trait. pub trait NativeVp { @@ -57,6 +54,8 @@ where H: StorageHasher, CA: WasmCacheAccess, { + /// The address of the account that owns the VP + pub address: &'a Address, /// Storage prefix iterators. pub iterators: RefCell<PrefixIterators<'a, DB>>, /// VP gas meter. pub gas_meter: RefCell<VpGasMeter>, /// Read-only access to the storage. pub storage: &'a Storage<DB, H>, /// Read-only access to the write log. pub write_log: &'a WriteLog, /// The transaction code is used for signature verification pub tx: &'a Tx, + /// The storage keys that have been changed. Used for calls to `eval`. + pub keys_changed: &'a BTreeSet<Key>, + /// The verifiers whose validity predicates should be triggered. Used for + /// calls to `eval`. + pub verifiers: &'a BTreeSet<Address>
, /// VP WASM compilation cache #[cfg(feature = "wasm-runtime")] pub vp_wasm_cache: crate::vm::wasm::VpCache, @@ -75,6 +79,30 @@ where pub cache_access: std::marker::PhantomData, } +/// Read access to the prior storage (state before tx execution) via +/// [`trait@StorageRead`]. +#[derive(Debug)] +pub struct CtxPreStorageRead<'view, 'a: 'view, DB, H, CA> +where + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, + CA: WasmCacheAccess, +{ + ctx: &'view Ctx<'a, DB, H, CA>, +} + +/// Read access to the posterior storage (state after tx execution) via +/// [`trait@StorageRead`]. +#[derive(Debug)] +pub struct CtxPostStorageRead<'view, 'a: 'view, DB, H, CA> +where + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, + CA: WasmCacheAccess, +{ + ctx: &'view Ctx<'a, DB, H, CA>, +} + impl<'a, DB, H, CA> Ctx<'a, DB, H, CA> where DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, @@ -82,20 +110,27 @@ where CA: 'static + WasmCacheAccess, { /// Initialize a new context for native VP call + #[allow(clippy::too_many_arguments)] pub fn new( + address: &'a Address, storage: &'a Storage, write_log: &'a WriteLog, tx: &'a Tx, gas_meter: VpGasMeter, + keys_changed: &'a BTreeSet, + verifiers: &'a BTreeSet
, #[cfg(feature = "wasm-runtime")] vp_wasm_cache: crate::vm::wasm::VpCache, ) -> Self { Self { + address, iterators: RefCell::new(PrefixIterators::default()), gas_meter: RefCell::new(gas_meter), storage, write_log, tx, + keys_changed, + verifiers, #[cfg(feature = "wasm-runtime")] vp_wasm_cache, #[cfg(not(feature = "wasm-runtime"))] @@ -104,153 +139,279 @@ where } /// Add a gas cost incured in a validity predicate - pub fn add_gas(&self, used_gas: u64) -> Result<()> { + pub fn add_gas(&self, used_gas: u64) -> Result<(), vp_env::RuntimeError> { vp_env::add_gas(&mut *self.gas_meter.borrow_mut(), used_gas) - .map_err(Error::ContextError) } - /// Storage read prior state (before tx execution). It will try to read from - /// the storage. - pub fn read_pre(&self, key: &Key) -> Result>> { + /// Read access to the prior storage (state before tx execution) + /// via [`trait@StorageRead`]. + pub fn pre<'view>(&'view self) -> CtxPreStorageRead<'view, 'a, DB, H, CA> { + CtxPreStorageRead { ctx: self } + } + + /// Read access to the posterior storage (state after tx execution) + /// via [`trait@StorageRead`]. + pub fn post<'view>( + &'view self, + ) -> CtxPostStorageRead<'view, 'a, DB, H, CA> { + CtxPostStorageRead { ctx: self } + } +} + +impl<'view, 'a, DB, H, CA> StorageRead<'view> + for CtxPreStorageRead<'view, 'a, DB, H, CA> +where + DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, + H: 'static + StorageHasher, + CA: 'static + WasmCacheAccess, +{ + type PrefixIter = >::PrefixIter; + + fn read_bytes( + &self, + key: &crate::types::storage::Key, + ) -> Result>, storage_api::Error> { vp_env::read_pre( - &mut *self.gas_meter.borrow_mut(), - self.storage, - self.write_log, + &mut *self.ctx.gas_meter.borrow_mut(), + self.ctx.storage, + self.ctx.write_log, key, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Storage read posterior state (after tx execution). It will try to read - /// from the write log first and if no entry found then from the - /// storage. 
- pub fn read_post(&self, key: &Key) -> Result>> { + fn has_key( + &self, + key: &crate::types::storage::Key, + ) -> Result { + vp_env::has_key_pre( + &mut *self.ctx.gas_meter.borrow_mut(), + self.ctx.storage, + key, + ) + .into_storage_result() + } + + fn rev_iter_prefix( + &self, + prefix: &crate::types::storage::Key, + ) -> storage_api::Result { + self.ctx.rev_iter_prefix(prefix).into_storage_result() + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>, storage_api::Error> { + vp_env::iter_pre_next::(&mut *self.ctx.gas_meter.borrow_mut(), iter) + .into_storage_result() + } + + // ---- Methods below are implemented in `self.ctx`, because they are + // the same in `pre/post` ---- + + fn iter_prefix( + &self, + prefix: &crate::types::storage::Key, + ) -> Result { + self.ctx.iter_prefix(prefix) + } + + fn get_chain_id(&self) -> Result { + self.ctx.get_chain_id() + } + + fn get_block_height(&self) -> Result { + self.ctx.get_block_height() + } + + fn get_block_hash(&self) -> Result { + self.ctx.get_block_hash() + } + + fn get_block_epoch(&self) -> Result { + self.ctx.get_block_epoch() + } +} + +impl<'view, 'a, DB, H, CA> StorageRead<'view> + for CtxPostStorageRead<'view, 'a, DB, H, CA> +where + DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, + H: 'static + StorageHasher, + CA: 'static + WasmCacheAccess, +{ + type PrefixIter = >::PrefixIter; + + fn read_bytes( + &self, + key: &crate::types::storage::Key, + ) -> Result>, storage_api::Error> { vp_env::read_post( - &mut *self.gas_meter.borrow_mut(), - self.storage, - self.write_log, + &mut *self.ctx.gas_meter.borrow_mut(), + self.ctx.storage, + self.ctx.write_log, key, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Storage read temporary state (after tx execution). It will try to read - /// from only the write log. - pub fn read_temp(&self, key: &Key) -> Result>> { - vp_env::read_temp( - &mut *self.gas_meter.borrow_mut(), - self.write_log, + fn has_key( + &self, + key: &crate::types::storage::Key, + ) -> Result { + vp_env::has_key_post( + &mut *self.ctx.gas_meter.borrow_mut(), + self.ctx.storage, + self.ctx.write_log, key, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Storage `has_key` in prior state (before tx execution). It will try to - /// read from the storage. 
- pub fn has_key_pre(&self, key: &Key) -> Result { - vp_env::has_key_pre( + fn rev_iter_prefix( + &self, + prefix: &crate::types::storage::Key, + ) -> storage_api::Result { + self.ctx.rev_iter_prefix(prefix).into_storage_result() + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>, storage_api::Error> { + vp_env::iter_post_next::( + &mut *self.ctx.gas_meter.borrow_mut(), + self.ctx.write_log, + iter, + ) + .into_storage_result() + } + + // ---- Methods below are implemented in `self.ctx`, because they are + // the same in `pre/post` ---- + + fn iter_prefix( + &self, + prefix: &crate::types::storage::Key, + ) -> Result { + self.ctx.iter_prefix(prefix) + } + + fn get_chain_id(&self) -> Result { + self.ctx.get_chain_id() + } + + fn get_block_height(&self) -> Result { + self.ctx.get_block_height() + } + + fn get_block_hash(&self) -> Result { + self.ctx.get_block_hash() + } + + fn get_block_epoch(&self) -> Result { + self.ctx.get_block_epoch() + } +} + +impl<'view, 'a: 'view, DB, H, CA> VpEnv<'view> for Ctx<'a, DB, H, CA> +where + DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, + H: 'static + StorageHasher, + CA: 'static + WasmCacheAccess, +{ + type Post = CtxPostStorageRead<'view, 'a, DB, H, CA>; + type Pre = CtxPreStorageRead<'view, 'a, DB, H, CA>; + type PrefixIter = >::PrefixIter; + + fn pre(&'view self) -> Self::Pre { + CtxPreStorageRead { ctx: self } + } + + fn post(&'view self) -> Self::Post { + CtxPostStorageRead { ctx: self } + } + + fn read_temp( + &self, + key: &Key, + ) -> Result, storage_api::Error> { + vp_env::read_temp( &mut *self.gas_meter.borrow_mut(), - self.storage, + self.write_log, key, ) - .map_err(Error::ContextError) + .map(|data| data.and_then(|t| T::try_from_slice(&t[..]).ok())) + .into_storage_result() } - /// Storage `has_key` in posterior state (after tx execution). It will try - /// to check the write log first and if no entry found then the storage. - pub fn has_key_post(&self, key: &Key) -> Result { - vp_env::has_key_post( + fn read_bytes_temp( + &self, + key: &Key, + ) -> Result>, storage_api::Error> { + vp_env::read_temp( &mut *self.gas_meter.borrow_mut(), - self.storage, self.write_log, key, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Getting the chain ID. - pub fn get_chain_id(&self) -> Result { + fn get_chain_id(&'view self) -> Result { vp_env::get_chain_id(&mut *self.gas_meter.borrow_mut(), self.storage) - .map_err(Error::ContextError) + .into_storage_result() } - /// Getting the block height. The height is that of the block to which the - /// current transaction is being applied. - pub fn get_block_height(&self) -> Result { + fn get_block_height( + &'view self, + ) -> Result { vp_env::get_block_height( &mut *self.gas_meter.borrow_mut(), self.storage, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Getting the block hash. The height is that of the block to which the - /// current transaction is being applied. - pub fn get_block_hash(&self) -> Result { + fn get_block_hash(&'view self) -> Result { vp_env::get_block_hash(&mut *self.gas_meter.borrow_mut(), self.storage) - .map_err(Error::ContextError) + .into_storage_result() } - /// Getting the block epoch. The epoch is that of the block to which the - /// current transaction is being applied. 
- pub fn get_block_epoch(&self) -> Result { + fn get_block_epoch(&'view self) -> Result { vp_env::get_block_epoch(&mut *self.gas_meter.borrow_mut(), self.storage) - .map_err(Error::ContextError) + .into_storage_result() } - /// Storage prefix iterator. It will try to get an iterator from the - /// storage. - pub fn iter_prefix( - &self, + fn iter_prefix( + &'view self, prefix: &Key, - ) -> Result<>::PrefixIter> { + ) -> Result { vp_env::iter_prefix( &mut *self.gas_meter.borrow_mut(), self.storage, prefix, ) - .map_err(Error::ContextError) + .into_storage_result() } - /// Storage prefix iterator for prior state (before tx execution). It will - /// try to read from the storage. - pub fn iter_pre_next( + fn rev_iter_prefix( &self, - iter: &mut >::PrefixIter, - ) -> Result)>> { - vp_env::iter_pre_next::(&mut *self.gas_meter.borrow_mut(), iter) - .map_err(Error::ContextError) + prefix: &Key, + ) -> Result { + vp_env::rev_iter_prefix( + &mut *self.gas_meter.borrow_mut(), + self.storage, + prefix, + ) + .into_storage_result() } - /// Storage prefix iterator next for posterior state (after tx execution). - /// It will try to read from the write log first and if no entry found - /// then from the storage. - pub fn iter_post_next( + fn eval( &self, - iter: &mut >::PrefixIter, - ) -> Result)>> { - vp_env::iter_post_next::( - &mut *self.gas_meter.borrow_mut(), - self.write_log, - iter, - ) - .map_err(Error::ContextError) - } - - /// Evaluate a validity predicate with given data. The address, changed - /// storage keys and verifiers will have the same values as the input to - /// caller's validity predicate. - /// - /// If the execution fails for whatever reason, this will return `false`. - /// Otherwise returns the result of evaluation. - pub fn eval( - &mut self, - address: &Address, - keys_changed: &BTreeSet, - verifiers: &BTreeSet
, vp_code: Vec, input_data: Vec, - ) -> bool { + ) -> Result { #[cfg(feature = "wasm-runtime")] { use std::marker::PhantomData; @@ -266,41 +427,56 @@ where let mut iterators: PrefixIterators<'_, DB> = PrefixIterators::default(); let mut result_buffer: Option> = None; + let mut vp_wasm_cache = self.vp_wasm_cache.clone(); let ctx = VpCtx::new( - address, + self.address, self.storage, self.write_log, &mut *self.gas_meter.borrow_mut(), self.tx, &mut iterators, - verifiers, + self.verifiers, &mut result_buffer, - keys_changed, + self.keys_changed, &eval_runner, - &mut self.vp_wasm_cache, + &mut vp_wasm_cache, ); match eval_runner.eval_native_result(ctx, vp_code, input_data) { - Ok(result) => result, + Ok(result) => Ok(result), Err(err) => { tracing::warn!( "VP eval from a native VP failed with: {}", err ); - false + Ok(false) } } } #[cfg(not(feature = "wasm-runtime"))] { - let _ = (address, keys_changed, verifiers, vp_code, input_data); + // This line is here to prevent unused var clippy warning + let _ = (vp_code, input_data); unimplemented!( "The \"wasm-runtime\" feature must be enabled to use the \ `eval` function." ) } } + + fn verify_tx_signature( + &self, + pk: &crate::types::key::common::PublicKey, + sig: &crate::types::key::common::Signature, + ) -> Result { + Ok(self.tx.verify_sig(pk, sig).is_ok()) + } + + fn get_tx_code_hash(&self) -> Result { + vp_env::get_tx_code_hash(&mut *self.gas_meter.borrow_mut(), self.tx) + .into_storage_result() + } } /// A convenience trait for reading and automatically deserializing a value from @@ -347,8 +523,8 @@ where where T: BorshDeserialize, { - let maybe_bytes = Ctx::read_post(self, key) - .wrap_err_with(|| format!("couldn't read_post {}", key))?; + let maybe_bytes = Ctx::read_bytes_post(self, key) + .wrap_err_with(|| format!("couldn't read_bytes_post {}", key))?; Self::deserialize_if_present(maybe_bytes) } @@ -358,8 +534,8 @@ where where T: BorshDeserialize, { - let maybe_bytes = Ctx::read_pre(self, key) - .wrap_err_with(|| format!("couldn't read_pre {}", key))?; + let maybe_bytes = Ctx::read_bytes_pre(self, key) + .wrap_err_with(|| format!("couldn't read_bytes_pre {}", key))?; Self::deserialize_if_present(maybe_bytes) } } diff --git a/shared/src/ledger/pos/mod.rs b/shared/src/ledger/pos/mod.rs index cc068f010e..e3f7a1956e 100644 --- a/shared/src/ledger/pos/mod.rs +++ b/shared/src/ledger/pos/mod.rs @@ -15,6 +15,7 @@ use namada_proof_of_stake::PosBase; pub use storage::*; pub use vp::PosVP; +use super::storage_api; use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self as ledger_storage, Storage}; use crate::types::address::{self, Address, InternalAddress}; @@ -119,3 +120,192 @@ impl self.try_into() } } + +// The error conversions are needed to implement `PosActions` in +// `tx_prelude/src/proof_of_stake.rs` +impl From> + for storage_api::Error +{ + fn from(err: namada_proof_of_stake::BecomeValidatorError
) -> Self { + Self::new(err) + } +} + +impl From<namada_proof_of_stake::BondError<Address>> for storage_api::Error { + fn from(err: namada_proof_of_stake::BondError<Address>
) -> Self { + Self::new(err) + } +} + +impl From> + for storage_api::Error +{ + fn from( + err: namada_proof_of_stake::UnbondError, + ) -> Self { + Self::new(err) + } +} + +impl From> + for storage_api::Error +{ + fn from(err: namada_proof_of_stake::WithdrawError
) -> Self { + Self::new(err) + } +} + +#[macro_use] +mod macros { + /// Implement `PosReadOnly` for a type that implements + /// [`trait@crate::ledger::storage_api::StorageRead`]. + /// + /// Excuse the horrible syntax - we haven't found a better way to use this + /// for native_vp `CtxPreStorageRead`/`CtxPostStorageRead`, which have + /// generics and explicit lifetimes. + /// + /// # Examples + /// + /// ```ignore + /// impl_pos_read_only! { impl PosReadOnly for X } + /// ``` + #[macro_export] + macro_rules! impl_pos_read_only { + ( + // Type error type has to be declared before the impl. + // This error type must `impl From for $error`. + type $error:tt = $err_ty:ty ; + // Matches anything, so that we can use lifetimes and generic types. + // This expects `impl(<.*>)? PoSReadOnly for $ty(<.*>)?`. + $( $any:tt )* ) + => { + $( $any )* + { + type Address = $crate::types::address::Address; + type $error = $err_ty; + type PublicKey = $crate::types::key::common::PublicKey; + type TokenAmount = $crate::types::token::Amount; + type TokenChange = $crate::types::token::Change; + + const POS_ADDRESS: Self::Address = $crate::ledger::pos::ADDRESS; + + fn staking_token_address() -> Self::Address { + $crate::ledger::pos::staking_token_address() + } + + fn read_pos_params(&self) -> std::result::Result { + let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, ¶ms_key())?.unwrap(); + Ok($crate::ledger::storage::types::decode(value).unwrap()) + } + + fn read_validator_staking_reward_address( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = $crate::ledger::storage_api::StorageRead::read_bytes( + self, + &validator_staking_reward_address_key(key), + )?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_consensus_key( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_consensus_key_key(key))?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_state( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_state_key(key))?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_total_deltas( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_total_deltas_key(key))?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_voting_power( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_voting_power_key(key))?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_slashes( + &self, + key: &Self::Address, + ) -> std::result::Result, Self::Error> { + let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_slashes_key(key))?; + Ok(value + .map(|value| $crate::ledger::storage::types::decode(value).unwrap()) + .unwrap_or_default()) + } + + fn read_bond( + &self, + key: &BondId, + ) -> std::result::Result, Self::Error> { + let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &bond_key(key))?; + Ok(value.map(|value| 
$crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_unbond( + &self, + key: &BondId, + ) -> std::result::Result, Self::Error> { + let value = $crate::ledger::storage_api::StorageRead::read_bytes(self, &unbond_key(key))?; + Ok(value.map(|value| $crate::ledger::storage::types::decode(value).unwrap())) + } + + fn read_validator_set( + &self, + ) -> std::result::Result { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_set_key())?.unwrap(); + Ok($crate::ledger::storage::types::decode(value).unwrap()) + } + + fn read_total_voting_power( + &self, + ) -> std::result::Result { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &total_voting_power_key())?.unwrap(); + Ok($crate::ledger::storage::types::decode(value).unwrap()) + } + + // TODO: return result + fn read_validator_eth_cold_key( + &self, + key: &Self::Address, + ) -> Option> { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_eth_cold_key_key(key)).unwrap().unwrap(); + Some($crate::ledger::storage::types::decode(value).unwrap()) + } + + // TODO: return result + fn read_validator_eth_hot_key( + &self, + key: &Self::Address, + ) -> Option> { + let value = + $crate::ledger::storage_api::StorageRead::read_bytes(self, &validator_eth_hot_key_key(key)).unwrap().unwrap(); + Some($crate::ledger::storage::types::decode(value).unwrap()) + } + } + } +} +} diff --git a/shared/src/ledger/pos/storage.rs b/shared/src/ledger/pos/storage.rs index 5afaa81570..a471312737 100644 --- a/shared/src/ledger/pos/storage.rs +++ b/shared/src/ledger/pos/storage.rs @@ -507,25 +507,39 @@ where &self, key: &Self::Address, ) -> Option> { + let public_key_type = std::any::type_name::(); + tracing::debug!(?public_key_type, ?key, "Reading eth cold key"); let (value, _gas) = self.read(&validator_eth_cold_key_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) + value.map(|value| { + let v = value.clone(); + decode(value).unwrap_or_else(|_| panic!("Couldn't decode {:?}", v)) + }) } fn read_validator_eth_hot_key( &self, key: &Self::Address, ) -> Option> { + let public_key_type = std::any::type_name::(); + tracing::debug!(?public_key_type, ?key, "Reading eth hot key"); let (value, _gas) = self.read(&validator_eth_hot_key_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) + value.map(|value| { + let v = value.clone(); + decode(value).unwrap_or_else(|_| panic!("Couldn't decode {:?}", v)) + }) } fn write_pos_params(&mut self, params: &PosParams) { self.write(¶ms_key(), encode(params)).unwrap(); } - fn write_validator_address_raw_hash(&mut self, address: &Self::Address) { - let raw_hash = address.raw_hash().unwrap(); + fn write_validator_address_raw_hash( + &mut self, + address: &Self::Address, + consensus_key: &Self::PublicKey, + ) { + let raw_hash = key::tm_consensus_key_raw_hash(consensus_key); self.write(&validator_address_raw_hash_key(raw_hash), encode(address)) .unwrap(); } @@ -604,6 +618,13 @@ where address: &Self::Address, value: &types::ValidatorEthKey, ) { + let public_key_type = std::any::type_name::(); + tracing::debug!( + ?public_key_type, + ?value, + ?address, + "Writing eth cold key" + ); self.write(&validator_eth_cold_key_key(address), encode(value)) .unwrap(); } @@ -613,6 +634,13 @@ where address: &Self::Address, value: &types::ValidatorEthKey, ) { + let public_key_type = std::any::type_name::(); + tracing::debug!( + ?public_key_type, + ?value, + ?address, + "Writing eth hot key" + ); 
self.write(&validator_eth_hot_key_key(address), encode(value)) .unwrap(); } diff --git a/shared/src/ledger/pos/vp.rs b/shared/src/ledger/pos/vp.rs index 61d2dc0ffb..c3139c9d5e 100644 --- a/shared/src/ledger/pos/vp.rs +++ b/shared/src/ledger/pos/vp.rs @@ -20,25 +20,27 @@ use super::{ is_unbond_key, is_validator_set_key, is_validator_staking_reward_address_key, is_validator_total_deltas_key, is_validator_voting_power_key, params_key, staking_token_address, - total_voting_power_key, unbond_key, validator_consensus_key_key, - validator_eth_cold_key_key, validator_eth_hot_key_key, validator_set_key, - validator_slashes_key, validator_staking_reward_address_key, - validator_state_key, validator_total_deltas_key, - validator_voting_power_key, BondId, Bonds, Unbonds, ValidatorConsensusKeys, - ValidatorSets, ValidatorTotalDeltas, + storage_api, total_voting_power_key, unbond_key, + validator_consensus_key_key, validator_eth_cold_key_key, + validator_eth_hot_key_key, validator_set_key, validator_slashes_key, + validator_staking_reward_address_key, validator_state_key, + validator_total_deltas_key, validator_voting_power_key, BondId, Bonds, + Unbonds, ValidatorConsensusKeys, ValidatorSets, ValidatorTotalDeltas, }; +use crate::impl_pos_read_only; use crate::ledger::governance::vp::is_proposal_accepted; -use crate::ledger::native_vp::{self, Ctx, NativeVp}; +use crate::ledger::native_vp::{ + self, Ctx, CtxPostStorageRead, CtxPreStorageRead, NativeVp, VpEnv, +}; use crate::ledger::pos::{ is_validator_address_raw_hash_key, is_validator_consensus_key_key, is_validator_state_key, }; use crate::ledger::storage::traits::StorageHasher; -use crate::ledger::storage::types::decode; use crate::ledger::storage::{self as ledger_storage}; use crate::types::address::{Address, InternalAddress}; use crate::types::storage::{Key, KeySeg}; -use crate::types::{key, token}; +use crate::types::token; use crate::vm::WasmCacheAccess; #[allow(missing_docs)] @@ -112,7 +114,7 @@ where &self, tx_data: &[u8], keys_changed: &BTreeSet, - verifiers: &BTreeSet
, + _verifiers: &BTreeSet<Address>
, ) -> Result { use validation::Data; use validation::DataUpdate::{self, *}; @@ -121,6 +123,7 @@ where let addr = Address::Internal(Self::ADDR); let mut changes: Vec> = vec![]; let current_epoch = self.ctx.get_block_epoch()?; + for key in keys_changed { if is_params_key(key) { let proposal_id = u64::try_from_slice(tx_data).ok(); @@ -128,29 +131,19 @@ where Some(id) => return Ok(is_proposal_accepted(&self.ctx, id)), _ => return Ok(false), } - } else if let Some(owner) = key.is_validity_predicate() { - let has_pre = self.ctx.has_key_pre(key)?; - let has_post = self.ctx.has_key_post(key)?; - if has_pre && has_post { - // VP updates must be verified by the owner - return Ok(!verifiers.contains(owner)); - } else if has_pre || !has_post { - // VP cannot be deleted - return Ok(false); - } } else if is_validator_set_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { ValidatorSets::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { ValidatorSets::try_from_slice(&bytes[..]).ok() }); changes.push(ValidatorSet(Data { pre, post })); } else if let Some(validator) = is_validator_state_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { ValidatorStates::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { ValidatorStates::try_from_slice(&bytes[..]).ok() }); changes.push(Validator { @@ -162,11 +155,11 @@ where { let pre = self .ctx - .read_pre(key)? + .read_bytes_pre(key)? .and_then(|bytes| Address::try_from_slice(&bytes[..]).ok()); let post = self .ctx - .read_post(key)? + .read_bytes_post(key)? 
.and_then(|bytes| Address::try_from_slice(&bytes[..]).ok()); changes.push(Validator { address: validator.clone(), @@ -174,10 +167,10 @@ where }); } else if let Some(validator) = is_validator_consensus_key_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { ValidatorConsensusKeys::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { ValidatorConsensusKeys::try_from_slice(&bytes[..]).ok() }); changes.push(Validator { @@ -185,10 +178,10 @@ where update: ConsensusKey(Data { pre, post }), }); } else if let Some(validator) = is_validator_total_deltas_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { ValidatorTotalDeltas::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { ValidatorTotalDeltas::try_from_slice(&bytes[..]).ok() }); changes.push(Validator { @@ -196,10 +189,10 @@ where update: TotalDeltas(Data { pre, post }), }); } else if let Some(validator) = is_validator_voting_power_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { ValidatorVotingPowers::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { ValidatorVotingPowers::try_from_slice(&bytes[..]).ok() }); changes.push(Validator { @@ -211,23 +204,12 @@ where { let pre = self .ctx - .read_pre(key)? + .read_bytes_pre(key)? .and_then(|bytes| Address::try_from_slice(&bytes[..]).ok()); let post = self .ctx - .read_post(key)? + .read_bytes_post(key)? .and_then(|bytes| Address::try_from_slice(&bytes[..]).ok()); - // Find the raw hashes of the addresses - let pre = pre.map(|pre| { - let raw_hash = - pre.raw_hash().map(String::from).unwrap_or_default(); - (pre, raw_hash) - }); - let post = post.map(|post| { - let raw_hash = - post.raw_hash().map(String::from).unwrap_or_default(); - (post, raw_hash) - }); changes.push(ValidatorAddressRawHash { raw_hash: raw_hash.to_string(), data: Data { pre, post }, @@ -238,26 +220,26 @@ where if owner != &addr { continue; } - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { token::Amount::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { token::Amount::try_from_slice(&bytes[..]).ok() }); changes.push(Balance(Data { pre, post })); } else if let Some(bond_id) = is_bond_key(key) { let pre = self .ctx - .read_pre(key)? + .read_bytes_pre(key)? .and_then(|bytes| Bonds::try_from_slice(&bytes[..]).ok()); let post = self .ctx - .read_post(key)? + .read_bytes_post(key)? .and_then(|bytes| Bonds::try_from_slice(&bytes[..]).ok()); // For bonds, we need to look-up slashes let slashes = self .ctx - .read_pre(&validator_slashes_key(&bond_id.validator))? + .read_bytes_pre(&validator_slashes_key(&bond_id.validator))? .and_then(|bytes| Slashes::try_from_slice(&bytes[..]).ok()) .unwrap_or_default(); changes.push(Bond { @@ -268,16 +250,18 @@ where } else if let Some(unbond_id) = is_unbond_key(key) { let pre = self .ctx - .read_pre(key)? + .read_bytes_pre(key)? 
.and_then(|bytes| Unbonds::try_from_slice(&bytes[..]).ok()); let post = self .ctx - .read_post(key)? + .read_bytes_post(key)? .and_then(|bytes| Unbonds::try_from_slice(&bytes[..]).ok()); // For unbonds, we need to look-up slashes let slashes = self .ctx - .read_pre(&validator_slashes_key(&unbond_id.validator))? + .read_bytes_pre(&validator_slashes_key( + &unbond_id.validator, + ))? .and_then(|bytes| Slashes::try_from_slice(&bytes[..]).ok()) .unwrap_or_default(); changes.push(Unbond { @@ -286,10 +270,10 @@ where slashes, }); } else if is_total_voting_power_key(key) { - let pre = self.ctx.read_pre(key)?.and_then(|bytes| { + let pre = self.ctx.read_bytes_pre(key)?.and_then(|bytes| { TotalVotingPowers::try_from_slice(&bytes[..]).ok() }); - let post = self.ctx.read_post(key)?.and_then(|bytes| { + let post = self.ctx.read_bytes_post(key)?.and_then(|bytes| { TotalVotingPowers::try_from_slice(&bytes[..]).ok() }); changes.push(TotalVotingPower(Data { pre, post })); @@ -303,7 +287,7 @@ where } } - let params = self.read_pos_params(); + let params = self.ctx.pre().read_pos_params()?; let errors = validate(¶ms, changes, current_epoch); Ok(if errors.is_empty() { true @@ -317,123 +301,22 @@ where } } -impl PosReadOnly for PosVP<'_, D, H, CA> -where - D: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>, - H: 'static + StorageHasher, - CA: 'static + WasmCacheAccess, -{ - type Address = Address; - type PublicKey = key::common::PublicKey; - type TokenAmount = token::Amount; - type TokenChange = token::Change; - - const POS_ADDRESS: Self::Address = super::ADDRESS; - - fn staking_token_address() -> Self::Address { - super::staking_token_address() - } - - fn read_pos_params(&self) -> PosParams { - let value = self.ctx.read_pre(¶ms_key()).unwrap().unwrap(); - decode(value).unwrap() - } - - fn read_validator_staking_reward_address( - &self, - key: &Self::Address, - ) -> Option { - let value = self - .ctx - .read_pre(&validator_staking_reward_address_key(key)) - .unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_consensus_key( - &self, - key: &Self::Address, - ) -> Option { - let value = self - .ctx - .read_pre(&validator_consensus_key_key(key)) - .unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_state( - &self, - key: &Self::Address, - ) -> Option { - let value = self.ctx.read_pre(&validator_state_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_total_deltas( - &self, - key: &Self::Address, - ) -> Option { - let value = - self.ctx.read_pre(&validator_total_deltas_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_voting_power( - &self, - key: &Self::Address, - ) -> Option { - let value = - self.ctx.read_pre(&validator_voting_power_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_slashes(&self, key: &Self::Address) -> Vec { - let value = self.ctx.read_pre(&validator_slashes_key(key)).unwrap(); - value - .map(|value| decode(value).unwrap()) - .unwrap_or_default() - } - - fn read_bond(&self, key: &BondId) -> Option { - let value = self.ctx.read_pre(&bond_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_unbond(&self, key: &BondId) -> Option { - let value = self.ctx.read_pre(&unbond_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } - - fn read_validator_set(&self) -> ValidatorSets { - let value = self.ctx.read_pre(&validator_set_key()).unwrap().unwrap(); - 
decode(value).unwrap() - } - - fn read_total_voting_power(&self) -> TotalVotingPowers { - let value = self - .ctx - .read_pre(&total_voting_power_key()) - .unwrap() - .unwrap(); - decode(value).unwrap() - } - - fn read_validator_eth_cold_key( - &self, - key: &Self::Address, - ) -> Option> { - let value = - self.ctx.read_pre(&validator_eth_cold_key_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } +impl_pos_read_only! { + type Error = storage_api::Error; + impl<'f, 'a, DB, H, CA> PosReadOnly for CtxPreStorageRead<'f, 'a, DB, H, CA> + where + DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter> +'static, + H: StorageHasher +'static, + CA: WasmCacheAccess +'static +} - fn read_validator_eth_hot_key( - &self, - key: &Self::Address, - ) -> Option> { - let value = self.ctx.read_pre(&validator_eth_hot_key_key(key)).unwrap(); - value.map(|value| decode(value).unwrap()) - } +impl_pos_read_only! { + type Error = storage_api::Error; + impl<'f, 'a, DB, H, CA> PosReadOnly for CtxPostStorageRead<'f, 'a, DB, H, CA> + where + DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter> +'static, + H: StorageHasher +'static, + CA: WasmCacheAccess +'static } impl From for Error { diff --git a/shared/src/ledger/slash_fund/mod.rs b/shared/src/ledger/slash_fund/mod.rs new file mode 100644 index 0000000000..e5ef72b5fe --- /dev/null +++ b/shared/src/ledger/slash_fund/mod.rs @@ -0,0 +1,107 @@ +//! SlashFund VP + +use std::collections::BTreeSet; + +/// SlashFund storage +pub mod storage; + +use borsh::BorshDeserialize; +use thiserror::Error; + +use self::storage as slash_fund_storage; +use super::governance::vp::is_proposal_accepted; +use super::storage::traits::StorageHasher; +use crate::ledger::native_vp::{self, Ctx, NativeVp}; +use crate::ledger::storage::{self as ledger_storage}; +use crate::types::address::{xan as nam, Address, InternalAddress}; +use crate::types::storage::Key; +use crate::types::token; +use crate::vm::WasmCacheAccess; + +/// Internal SlashFund address +pub const ADDRESS: Address = Address::Internal(InternalAddress::SlashFund); + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum Error { + #[error("Native VP error: {0}")] + NativeVpError(native_vp::Error), +} + +/// SlashFund functions result +pub type Result = std::result::Result; + +/// SlashFund VP +pub struct SlashFundVp<'a, DB, H, CA> +where + DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>, + H: StorageHasher, + CA: WasmCacheAccess, +{ + /// Context to interact with the host structures. + pub ctx: Ctx<'a, DB, H, CA>, +} + +impl<'a, DB, H, CA> NativeVp for SlashFundVp<'a, DB, H, CA> +where + DB: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>, + H: 'static + StorageHasher, + CA: 'static + WasmCacheAccess, +{ + type Error = Error; + + const ADDR: InternalAddress = InternalAddress::SlashFund; + + fn validate_tx( + &self, + tx_data: &[u8], + keys_changed: &BTreeSet, + _verifiers: &BTreeSet
, + ) -> Result { + let result = keys_changed.iter().all(|key| { + let key_type: KeyType = key.into(); + match key_type { + KeyType::BALANCE(addr) => { + if addr.ne(&ADDRESS) { + return true; + } + + let proposal_id = u64::try_from_slice(tx_data).ok(); + match proposal_id { + Some(id) => is_proposal_accepted(&self.ctx, id), + None => false, + } + } + KeyType::UNKNOWN_SLASH_FUND => false, + KeyType::UNKNOWN => true, + } + }); + Ok(result) + } +} + +#[allow(clippy::upper_case_acronyms)] +enum KeyType { + #[allow(clippy::upper_case_acronyms)] + BALANCE(Address), + #[allow(clippy::upper_case_acronyms)] + #[allow(non_camel_case_types)] + UNKNOWN_SLASH_FUND, + #[allow(clippy::upper_case_acronyms)] + UNKNOWN, +} + +impl From<&Key> for KeyType { + fn from(value: &Key) -> Self { + if slash_fund_storage::is_slash_fund_key(value) { + KeyType::UNKNOWN_SLASH_FUND + } else if token::is_any_token_balance_key(value).is_some() { + match token::is_balance_key(&nam(), value) { + Some(addr) => KeyType::BALANCE(addr.clone()), + None => KeyType::UNKNOWN, + } + } else { + KeyType::UNKNOWN + } + } +} diff --git a/shared/src/ledger/slash_fund/storage.rs b/shared/src/ledger/slash_fund/storage.rs new file mode 100644 index 0000000000..60d29f0f48 --- /dev/null +++ b/shared/src/ledger/slash_fund/storage.rs @@ -0,0 +1,7 @@ +use super::ADDRESS; +use crate::types::storage::{DbKeySeg, Key}; + +/// Check if a key is a slash fund key +pub fn is_slash_fund_key(key: &Key) -> bool { + matches!(&key.segments[0], DbKeySeg::AddressSeg(addr) if addr == &ADDRESS) +} diff --git a/shared/src/ledger/storage/mockdb.rs b/shared/src/ledger/storage/mockdb.rs index 99260d8333..234cad4498 100644 --- a/shared/src/ledger/storage/mockdb.rs +++ b/shared/src/ledger/storage/mockdb.rs @@ -427,7 +427,28 @@ impl<'iter> DBIter<'iter> for MockDB { let db_prefix = "subspace/".to_owned(); let prefix = format!("{}{}", db_prefix, prefix); let iter = self.0.borrow().clone().into_iter(); - MockPrefixIterator::new(MockIterator { prefix, iter }, db_prefix) + MockPrefixIterator::new( + MockIterator { + prefix, + iter, + reverse_order: false, + }, + db_prefix, + ) + } + + fn rev_iter_prefix(&'iter self, prefix: &Key) -> Self::PrefixIter { + let db_prefix = "subspace/".to_owned(); + let prefix = format!("{}{}", db_prefix, prefix); + let iter = self.0.borrow().clone().into_iter(); + MockPrefixIterator::new( + MockIterator { + prefix, + iter, + reverse_order: true, + }, + db_prefix, + ) } } @@ -437,21 +458,34 @@ pub struct MockIterator { prefix: String, /// The concrete iterator pub iter: btree_map::IntoIter>, + /// Is the iterator in reverse order? + reverse_order: bool, } /// A prefix iterator for the [`MockDB`]. 
pub type MockPrefixIterator = PrefixIterator; impl Iterator for MockIterator { - type Item = KVBytes; + type Item = Result; fn next(&mut self) -> Option { - for (key, val) in &mut self.iter { - if key.starts_with(&self.prefix) { - return Some(( - Box::from(key.as_bytes()), - Box::from(val.as_slice()), - )); + if self.reverse_order { + for (key, val) in (&mut self.iter).rev() { + if key.starts_with(&self.prefix) { + return Some(Ok(( + Box::from(key.as_bytes()), + Box::from(val.as_slice()), + ))); + } + } + } else { + for (key, val) in &mut self.iter { + if key.starts_with(&self.prefix) { + return Some(Ok(( + Box::from(key.as_bytes()), + Box::from(val.as_slice()), + ))); + } } } None @@ -464,7 +498,9 @@ impl Iterator for PrefixIterator { /// Returns the next pair and the gas cost fn next(&mut self) -> Option<(String, Vec, u64)> { match self.iter.next() { - Some((key, val)) => { + Some(result) => { + let (key, val) = + result.expect("Prefix iterator shouldn't fail"); let key = String::from_utf8(key.to_vec()) .expect("Cannot convert from bytes to key string"); match key.strip_prefix(&self.db_prefix) { diff --git a/shared/src/ledger/storage/mod.rs b/shared/src/ledger/storage/mod.rs index c2464c52d1..805caa6348 100644 --- a/shared/src/ledger/storage/mod.rs +++ b/shared/src/ledger/storage/mod.rs @@ -13,8 +13,9 @@ use std::array; use thiserror::Error; -use super::parameters; use super::parameters::Parameters; +use super::storage_api::{ResultExt, StorageRead, StorageWrite}; +use super::{parameters, storage_api}; use crate::ledger::gas::MIN_STORAGE_GAS; use crate::ledger::parameters::EpochDuration; use crate::ledger::storage::merkle_tree::{ @@ -247,8 +248,13 @@ pub trait DBIter<'iter> { /// The concrete type of the iterator type PrefixIter: Debug + Iterator, u64)>; - /// Read account subspace key value pairs with the given prefix from the DB + /// Read account subspace key value pairs with the given prefix from the DB, + /// ordered by the storage keys. fn iter_prefix(&'iter self, prefix: &Key) -> Self::PrefixIter; + + /// Read account subspace key value pairs with the given prefix from the DB, + /// reverse ordered by the storage keys. + fn rev_iter_prefix(&'iter self, prefix: &Key) -> Self::PrefixIter; } /// Atomic batch write. 
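// A usage sketch of the two `DBIter` prefix iterators added above (this
// helper is not part of the diff; its name and standalone-function shape are
// illustrative only). Both iterators yield `(key, value, gas)` items;
// `rev_iter_prefix` is expected to return the same items as `iter_prefix`,
// just in descending storage-key order.
fn keys_in_both_orders<'iter, D>(
    db: &'iter D,
    prefix: &Key,
) -> (Vec<String>, Vec<String>)
where
    D: DBIter<'iter>,
{
    let forward: Vec<String> = db
        .iter_prefix(prefix)
        .map(|(key, _val, _gas)| key)
        .collect();
    let reverse: Vec<String> = db
        .rev_iter_prefix(prefix)
        .map(|(key, _val, _gas)| key)
        .collect();
    // E.g. with keys "p/a", "p/b", "p/c" under prefix "p", `forward` is
    // ["p/a", "p/b", "p/c"] and `reverse` is ["p/c", "p/b", "p/a"].
    (forward, reverse)
}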
@@ -416,7 +422,7 @@ where } } - /// Returns a prefix iterator and the gas cost + /// Returns a prefix iterator, ordered by storage keys, and the gas cost pub fn iter_prefix( &self, prefix: &Key, @@ -424,17 +430,29 @@ where (self.db.iter_prefix(prefix), prefix.len() as _) } + /// Returns a prefix iterator, reverse ordered by storage keys, and the gas + /// cost + pub fn rev_iter_prefix( + &self, + prefix: &Key, + ) -> (>::PrefixIter, u64) { + (self.db.rev_iter_prefix(prefix), prefix.len() as _) + } + /// Write a value to the specified subspace and returns the gas cost and the /// size difference pub fn write( &mut self, key: &Key, - value: impl AsRef<[u8]> + Clone, + value: impl AsRef<[u8]>, ) -> Result<(u64, i64)> { + // Note that this method is the same as `StorageWrite::write_bytes`, + // but with gas and storage bytes len diff accounting tracing::debug!("storage write key {}", key,); - self.block.tree.update(key, value.clone())?; + let value = value.as_ref(); + self.block.tree.update(key, &value)?; - let len = value.as_ref().len(); + let len = value.len(); let gas = key.len() + len; let size_diff = self.db.write_subspace_val(self.last_height, key, value)?; @@ -444,6 +462,8 @@ where /// Delete the specified subspace and returns the gas cost and the size /// difference pub fn delete(&mut self, key: &Key) -> Result<(u64, i64)> { + // Note that this method is the same as `StorageWrite::delete`, + // but with gas and storage bytes len diff accounting let mut deleted_bytes_len = 0; if self.has_key(key)?.0 { self.block.tree.delete(key)?; @@ -720,6 +740,148 @@ where } } +impl<'iter, D, H> StorageRead<'iter> for Storage +where + D: DB + for<'iter_> DBIter<'iter_>, + H: StorageHasher, +{ + type PrefixIter = >::PrefixIter; + + fn read_bytes( + &self, + key: &crate::types::storage::Key, + ) -> std::result::Result>, storage_api::Error> { + self.db.read_subspace_val(key).into_storage_result() + } + + fn has_key( + &self, + key: &crate::types::storage::Key, + ) -> std::result::Result { + self.block.tree.has_key(key).into_storage_result() + } + + fn iter_prefix( + &'iter self, + prefix: &crate::types::storage::Key, + ) -> std::result::Result { + Ok(self.db.iter_prefix(prefix)) + } + + fn rev_iter_prefix( + &'iter self, + prefix: &crate::types::storage::Key, + ) -> std::result::Result { + Ok(self.db.rev_iter_prefix(prefix)) + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> std::result::Result)>, storage_api::Error> + { + Ok(iter.next().map(|(key, val, _gas)| (key, val))) + } + + fn get_chain_id(&self) -> std::result::Result { + Ok(self.chain_id.to_string()) + } + + fn get_block_height( + &self, + ) -> std::result::Result { + Ok(self.block.height) + } + + fn get_block_hash( + &self, + ) -> std::result::Result { + Ok(self.block.hash.clone()) + } + + fn get_block_epoch( + &self, + ) -> std::result::Result { + Ok(self.block.epoch) + } +} + +impl StorageWrite for Storage +where + D: DB + for<'iter> DBIter<'iter>, + H: StorageHasher, +{ + fn write_bytes( + &mut self, + key: &crate::types::storage::Key, + val: impl AsRef<[u8]>, + ) -> storage_api::Result<()> { + // Note that this method is the same as `Storage::write`, but without + // gas and storage bytes len diff accounting, because it can only be + // used by the protocol that has a direct mutable access to storage + let val = val.as_ref(); + self.block.tree.update(key, &val).into_storage_result()?; + let _ = self + .db + .write_subspace_val(self.block.height, key, val) + .into_storage_result()?; + Ok(()) + } + + fn delete( + &mut self, 
+ key: &crate::types::storage::Key, + ) -> storage_api::Result<()> { + // Note that this method is the same as `Storage::delete`, but without + // gas and storage bytes len diff accounting, because it can only be + // used by the protocol that has a direct mutable access to storage + self.block.tree.delete(key).into_storage_result()?; + let _ = self + .db + .delete_subspace_val(self.block.height, key) + .into_storage_result()?; + Ok(()) + } +} + +impl StorageWrite for &mut Storage +where + D: DB + for<'iter> DBIter<'iter>, + H: StorageHasher, +{ + fn write( + &mut self, + key: &crate::types::storage::Key, + val: T, + ) -> storage_api::Result<()> { + let val = val.try_to_vec().unwrap(); + self.write_bytes(key, val) + } + + fn write_bytes( + &mut self, + key: &crate::types::storage::Key, + val: impl AsRef<[u8]>, + ) -> storage_api::Result<()> { + let _ = self + .db + .write_subspace_val(self.block.height, key, val) + .into_storage_result()?; + Ok(()) + } + + fn delete( + &mut self, + key: &crate::types::storage::Key, + ) -> storage_api::Result<()> { + let _ = self + .db + .delete_subspace_val(self.block.height, key) + .into_storage_result()?; + Ok(()) + } +} + impl From for Error { fn from(error: MerkleTreeError) -> Self { Self::MerkleTreeError(error) diff --git a/shared/src/ledger/storage/types.rs b/shared/src/ledger/storage/types.rs index c30c3873fc..0f270d4413 100644 --- a/shared/src/ledger/storage/types.rs +++ b/shared/src/ledger/storage/types.rs @@ -45,9 +45,10 @@ pub struct PrefixIterator { impl PrefixIterator { /// Initialize a new prefix iterator - pub fn new(iter: I, db_prefix: String) -> Self + pub fn new(iter: I, db_prefix: String) -> Self where - I: Iterator, + E: std::error::Error, + I: Iterator>, { PrefixIterator { iter, db_prefix } } diff --git a/shared/src/ledger/storage_api/collections/lazy_map.rs b/shared/src/ledger/storage_api/collections/lazy_map.rs new file mode 100644 index 0000000000..34a0f7d891 --- /dev/null +++ b/shared/src/ledger/storage_api/collections/lazy_map.rs @@ -0,0 +1,563 @@ +//! Lazy map. + +use std::collections::HashMap; +use std::fmt::Debug; +use std::hash::Hash; +use std::marker::PhantomData; + +use borsh::{BorshDeserialize, BorshSerialize}; +use thiserror::Error; + +use super::super::Result; +use super::{LazyCollection, ReadError}; +use crate::ledger::storage_api::validation::{self, Data}; +use crate::ledger::storage_api::{self, ResultExt, StorageRead, StorageWrite}; +use crate::ledger::vp_env::VpEnv; +use crate::types::storage::{self, DbKeySeg, KeySeg}; + +/// Subkey corresponding to the data elements of the LazyMap +pub const DATA_SUBKEY: &str = "data"; + +/// Lazy map. +/// +/// This can be used as an alternative to `std::collections::HashMap` and +/// `BTreeMap`. In the lazy map, the elements do not reside in memory but are +/// instead read and written to storage sub-keys of the storage `key` used to +/// construct the map. +/// +/// In the [`LazyMap`], the type of key `K` can be anything that implements +/// [`storage::KeySeg`] and this trait is used to turn the keys into key +/// segments. 
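///
/// # Examples
///
/// A usage sketch, mirroring the `test_lazy_map_basics` unit test further
/// below; it assumes a `storage` handle implementing `StorageRead` and
/// `StorageWrite` is in scope:
///
/// ```ignore
/// let key = storage::Key::parse("test").unwrap();
/// let lazy_map = LazyMap::<u64, String>::open(key);
/// lazy_map.insert(&mut storage, 123, "Test".to_string())?;
/// assert!(lazy_map.contains(&storage, &123)?);
/// assert_eq!(lazy_map.get(&storage, &123)?.unwrap(), "Test".to_string());
/// ```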
+#[derive(Debug)] +pub struct LazyMap { + key: storage::Key, + phantom_k: PhantomData, + phantom_v: PhantomData, + phantom_son: PhantomData, +} + +/// A `LazyMap` with another `LazyCollection` inside it's value `V` +pub type NestedMap = LazyMap; + +/// Possible sub-keys of a [`LazyMap`] +#[derive(Clone, Debug)] +pub enum SubKey { + /// Data sub-key, further sub-keyed by its literal map key + Data(K), +} + +/// Possible sub-keys of a [`LazyMap`], together with their [`validation::Data`] +/// that contains prior and posterior state. +#[derive(Clone, Debug)] +pub enum SubKeyWithData { + /// Data sub-key, further sub-keyed by its literal map key + Data(K, Data), +} + +/// Possible actions that can modify a simple (not nested) [`LazyMap`]. This +/// roughly corresponds to the methods that have `StorageWrite` access. +#[derive(Clone, Debug)] +pub enum Action { + /// Insert or update a value `V` at key `K` in a [`LazyMap`]. + Insert(K, V), + /// Remove a value `V` at key `K` from a [`LazyMap`]. + Remove(K, V), + /// Update a value `V` at key `K` in a [`LazyMap`]. + Update { + /// key at which the value is updated + key: K, + /// value before the update + pre: V, + /// value after the update + post: V, + }, +} + +/// Possible actions that can modify a nested [`LazyMap`]. +#[derive(Clone, Debug)] +pub enum NestedAction { + /// Nested collection action `A` at key `K` + At(K, A), +} + +/// Possible sub-keys of a nested [`LazyMap`] +#[derive(Clone, Debug)] +pub enum NestedSubKey { + /// Data sub-key + Data { + /// Literal map key + key: K, + /// Sub-key in the nested collection + nested_sub_key: S, + }, +} + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum ValidationError { + #[error("Invalid storage key {0}")] + InvalidSubKey(storage::Key), + #[error("Invalid nested storage key {0}")] + InvalidNestedSubKey(storage::Key), +} + +/// [`LazyMap`] validation result +pub type ValidationResult = std::result::Result; + +impl LazyCollection for LazyMap +where + K: storage::KeySeg + Clone + Hash + Eq + Debug, + V: LazyCollection + Debug, +{ + type Action = NestedAction::Action>; + type SubKey = NestedSubKey::SubKey>; + type SubKeyWithData = + NestedSubKey::SubKeyWithData>; + type Value = ::Value; + + fn open(key: storage::Key) -> Self { + Self { + key, + phantom_k: PhantomData, + phantom_v: PhantomData, + phantom_son: PhantomData, + } + } + + fn is_valid_sub_key( + &self, + key: &storage::Key, + ) -> storage_api::Result> { + let suffix = match key.split_prefix(&self.key) { + None => { + // not matching prefix, irrelevant + return Ok(None); + } + Some(None) => { + // no suffix, invalid + return Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(); + } + Some(Some(suffix)) => suffix, + }; + + // Match the suffix against expected sub-keys + match &suffix.segments[..2] { + [DbKeySeg::StringSeg(sub_a), DbKeySeg::StringSeg(sub_b)] + if sub_a == DATA_SUBKEY => + { + if let Ok(key_in_kv) = storage::KeySeg::parse(sub_b.clone()) { + let nested = self.at(&key_in_kv).is_valid_sub_key(key)?; + match nested { + Some(nested_sub_key) => Ok(Some(NestedSubKey::Data { + key: key_in_kv, + nested_sub_key, + })), + None => Err(ValidationError::InvalidNestedSubKey( + key.clone(), + )) + .into_storage_result(), + } + } else { + Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result() + } + } + _ => Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(), + } + } + + fn read_sub_key_data( + env: &ENV, + storage_key: &storage::Key, + sub_key: Self::SubKey, + ) -> 
storage_api::Result> + where + ENV: for<'a> VpEnv<'a>, + { + let NestedSubKey::Data { + key, + // In here, we just have a nested sub-key without data + nested_sub_key, + } = sub_key; + // Try to read data from the nested collection + let nested_data = ::read_sub_key_data( + env, + storage_key, + nested_sub_key, + )?; + // If found, transform it back into a `NestedSubKey`, but with + // `nested_sub_key` replaced with the one we read + Ok(nested_data.map(|nested_sub_key| NestedSubKey::Data { + key, + nested_sub_key, + })) + } + + fn validate_changed_sub_keys( + keys: Vec, + ) -> storage_api::Result> { + // We have to group the nested sub-keys by the key from this map + let mut grouped_by_key: HashMap< + K, + Vec<::SubKeyWithData>, + > = HashMap::new(); + for NestedSubKey::Data { + key, + nested_sub_key, + } in keys + { + grouped_by_key + .entry(key) + .or_insert_with(Vec::new) + .push(nested_sub_key); + } + + // Recurse for each sub-keys group + let mut actions = vec![]; + for (key, sub_keys) in grouped_by_key { + let nested_actions = + ::validate_changed_sub_keys(sub_keys)?; + actions.extend( + nested_actions + .into_iter() + .map(|action| NestedAction::At(key.clone(), action)), + ); + } + Ok(actions) + } +} + +impl LazyCollection for LazyMap +where + K: storage::KeySeg + Debug, + V: BorshDeserialize + BorshSerialize + 'static + Debug, +{ + type Action = Action; + type SubKey = SubKey; + type SubKeyWithData = SubKeyWithData; + type Value = V; + + /// Create or use an existing map with the given storage `key`. + fn open(key: storage::Key) -> Self { + Self { + key, + phantom_k: PhantomData, + phantom_v: PhantomData, + phantom_son: PhantomData, + } + } + + fn is_valid_sub_key( + &self, + key: &storage::Key, + ) -> storage_api::Result> { + let suffix = match key.split_prefix(&self.key) { + None => { + // not matching prefix, irrelevant + return Ok(None); + } + Some(None) => { + // no suffix, invalid + return Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(); + } + Some(Some(suffix)) => suffix, + }; + + // Match the suffix against expected sub-keys + match &suffix.segments[..] { + [DbKeySeg::StringSeg(sub_a), DbKeySeg::StringSeg(sub_b)] + if sub_a == DATA_SUBKEY => + { + if let Ok(key_in_kv) = storage::KeySeg::parse(sub_b.clone()) { + Ok(Some(SubKey::Data(key_in_kv))) + } else { + Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result() + } + } + _ => Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(), + } + } + + fn read_sub_key_data( + env: &ENV, + storage_key: &storage::Key, + sub_key: Self::SubKey, + ) -> storage_api::Result> + where + ENV: for<'a> VpEnv<'a>, + { + let SubKey::Data(key) = sub_key; + let data = validation::read_data(env, storage_key)?; + Ok(data.map(|data| SubKeyWithData::Data(key, data))) + } + + fn validate_changed_sub_keys( + keys: Vec, + ) -> storage_api::Result> { + Ok(keys + .into_iter() + .map(|change| { + let SubKeyWithData::Data(key, data) = change; + match data { + Data::Add { post } => Action::Insert(key, post), + Data::Update { pre, post } => { + Action::Update { key, pre, post } + } + Data::Delete { pre } => Action::Remove(key, pre), + } + }) + .collect()) + } +} + +// Generic `LazyMap` methods that require no bounds on values `V` +impl LazyMap +where + K: storage::KeySeg, +{ + /// Returns whether the set contains a value. 
+ pub fn contains(&self, storage: &S, key: &K) -> Result + where + S: for<'iter> StorageRead<'iter>, + { + storage.has_key(&self.get_data_key(key)) + } + + /// Get the prefix of set's elements storage + fn get_data_prefix(&self) -> storage::Key { + self.key.push(&DATA_SUBKEY.to_owned()).unwrap() + } + + /// Get the sub-key of a given element + fn get_data_key(&self, key: &K) -> storage::Key { + let key_str = key.to_db_key(); + self.get_data_prefix().push(&key_str).unwrap() + } +} + +// `LazyMap` methods with nested `LazyCollection`s `V` +impl LazyMap +where + K: storage::KeySeg + Clone + Hash + Eq + Debug, + V: LazyCollection + Debug, +{ + /// Get a nested collection at given key `key`. If there is no nested + /// collection at the given key, a new empty one will be provided. The + /// nested collection may be manipulated through its methods. + pub fn at(&self, key: &K) -> V { + V::open(self.get_data_key(key)) + } + + /// An iterator visiting all key-value elements, where the values are from + /// the inner-most collection. The iterator element type is `Result<_>`, + /// because iterator's call to `next` may fail with e.g. out of gas or + /// data decoding error. + /// + /// Note that this function shouldn't be used in transactions and VPs code + /// on unbounded maps to avoid gas usage increasing with the length of the + /// map. + pub fn iter<'iter>( + &'iter self, + storage: &'iter impl StorageRead<'iter>, + ) -> Result< + impl Iterator< + Item = Result<( + ::SubKey, + ::Value, + )>, + > + 'iter, + > { + let iter = storage_api::iter_prefix(storage, &self.get_data_prefix())?; + Ok(iter.map(|key_val_res| { + let (key, val) = key_val_res?; + let sub_key = LazyCollection::is_valid_sub_key(self, &key)? + .ok_or(ReadError::UnexpectedlyEmptyStorageKey) + .into_storage_result()?; + Ok((sub_key, val)) + })) + } +} + +// `LazyMap` methods with borsh encoded values `V` +impl LazyMap +where + K: storage::KeySeg, + V: BorshDeserialize + BorshSerialize + 'static, +{ + /// Inserts a key-value pair into the map. + /// + /// The full storage key identifies the key in the pair, while the value is + /// held within the storage key. + /// + /// If the map did not have this key present, `None` is returned. + /// If the map did have this key present, the value is updated, and the old + /// value is returned. Unlike in `std::collection::HashMap`, the key is also + /// updated; this matters for types that can be `==` without being + /// identical. + pub fn insert( + &self, + storage: &mut S, + key: K, + val: V, + ) -> Result> + where + S: StorageWrite + for<'iter> StorageRead<'iter>, + { + let previous = self.get(storage, &key)?; + + let data_key = self.get_data_key(&key); + Self::write_key_val(storage, &data_key, val)?; + + Ok(previous) + } + + /// Removes a key from the map, returning the value at the key if the key + /// was previously in the map. + pub fn remove(&self, storage: &mut S, key: &K) -> Result> + where + S: StorageWrite + for<'iter> StorageRead<'iter>, + { + let value = self.get(storage, key)?; + + let data_key = self.get_data_key(key); + storage.delete(&data_key)?; + + Ok(value) + } + + /// Returns the value corresponding to the key, if any. + pub fn get(&self, storage: &S, key: &K) -> Result> + where + S: for<'iter> StorageRead<'iter>, + { + let data_key = self.get_data_key(key); + Self::read_key_val(storage, &data_key) + } + + /// Returns whether the map contains no elements. 
+ pub fn is_empty(&self, storage: &S) -> Result + where + S: for<'iter> StorageRead<'iter>, + { + let mut iter = + storage_api::iter_prefix_bytes(storage, &self.get_data_prefix())?; + Ok(iter.next().is_none()) + } + + /// Reads the number of elements in the map. + /// + /// Note that this function shouldn't be used in transactions and VPs code + /// on unbounded maps to avoid gas usage increasing with the length of the + /// set. + #[allow(clippy::len_without_is_empty)] + pub fn len(&self, storage: &S) -> Result + where + S: for<'iter> StorageRead<'iter>, + { + let iter = + storage_api::iter_prefix_bytes(storage, &self.get_data_prefix())?; + iter.count().try_into().into_storage_result() + } + + /// An iterator visiting all key-value elements. The iterator element type + /// is `Result<(K, V)>`, because iterator's call to `next` may fail with + /// e.g. out of gas or data decoding error. + /// + /// Note that this function shouldn't be used in transactions and VPs code + /// on unbounded maps to avoid gas usage increasing with the length of the + /// map. + pub fn iter<'iter>( + &self, + storage: &'iter impl StorageRead<'iter>, + ) -> Result> + 'iter> { + let iter = storage_api::iter_prefix(storage, &self.get_data_prefix())?; + Ok(iter.map(|key_val_res| { + let (key, val) = key_val_res?; + let last_key_seg = key + .last() + .ok_or(ReadError::UnexpectedlyEmptyStorageKey) + .into_storage_result()?; + let key = K::parse(last_key_seg.raw()).into_storage_result()?; + Ok((key, val)) + })) + } + + /// Reads a value from storage + fn read_key_val( + storage: &S, + storage_key: &storage::Key, + ) -> Result> + where + S: for<'iter> StorageRead<'iter>, + { + let res = storage.read(storage_key)?; + Ok(res) + } + + /// Write a value into storage + fn write_key_val( + storage: &mut impl StorageWrite, + storage_key: &storage::Key, + val: V, + ) -> Result<()> { + storage.write(storage_key, val) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::ledger::storage::testing::TestStorage; + + #[test] + fn test_lazy_map_basics() -> storage_api::Result<()> { + let mut storage = TestStorage::default(); + + let key = storage::Key::parse("test").unwrap(); + let lazy_map = LazyMap::::open(key); + + // The map should be empty at first + assert!(lazy_map.is_empty(&storage)?); + assert!(lazy_map.len(&storage)? == 0); + assert!(!lazy_map.contains(&storage, &0)?); + assert!(!lazy_map.contains(&storage, &1)?); + assert!(lazy_map.iter(&storage)?.next().is_none()); + assert!(lazy_map.get(&storage, &0)?.is_none()); + assert!(lazy_map.get(&storage, &1)?.is_none()); + assert!(lazy_map.remove(&mut storage, &0)?.is_none()); + assert!(lazy_map.remove(&mut storage, &1)?.is_none()); + + // Insert a new value and check that it's added + let (key, val) = (123, "Test".to_string()); + lazy_map.insert(&mut storage, key, val.clone())?; + assert!(!lazy_map.contains(&storage, &0)?); + assert!(lazy_map.contains(&storage, &key)?); + assert!(!lazy_map.is_empty(&storage)?); + assert!(lazy_map.len(&storage)? == 1); + assert_eq!( + lazy_map.iter(&storage)?.next().unwrap()?, + (key, val.clone()) + ); + assert!(lazy_map.get(&storage, &0)?.is_none()); + assert_eq!(lazy_map.get(&storage, &key)?.unwrap(), val); + + // Remove the last value and check that the map is empty again + let removed = lazy_map.remove(&mut storage, &key)?.unwrap(); + assert_eq!(removed, val); + assert!(lazy_map.is_empty(&storage)?); + assert!(lazy_map.len(&storage)? 
== 0); + assert!(!lazy_map.contains(&storage, &0)?); + assert!(!lazy_map.contains(&storage, &1)?); + assert!(lazy_map.get(&storage, &0)?.is_none()); + assert!(lazy_map.get(&storage, &key)?.is_none()); + assert!(lazy_map.iter(&storage)?.next().is_none()); + assert!(lazy_map.remove(&mut storage, &key)?.is_none()); + + Ok(()) + } +} diff --git a/shared/src/ledger/storage_api/collections/lazy_vec.rs b/shared/src/ledger/storage_api/collections/lazy_vec.rs new file mode 100644 index 0000000000..59eaa225e5 --- /dev/null +++ b/shared/src/ledger/storage_api/collections/lazy_vec.rs @@ -0,0 +1,516 @@ +//! Lazy dynamically-sized vector. + +use std::collections::BTreeSet; +use std::fmt::Debug; +use std::marker::PhantomData; + +use borsh::{BorshDeserialize, BorshSerialize}; +use thiserror::Error; + +use super::super::Result; +use super::LazyCollection; +use crate::ledger::storage_api::validation::{self, Data}; +use crate::ledger::storage_api::{self, ResultExt, StorageRead, StorageWrite}; +use crate::ledger::vp_env::VpEnv; +use crate::types::storage::{self, DbKeySeg}; + +/// Subkey pointing to the length of the LazyVec +pub const LEN_SUBKEY: &str = "len"; +/// Subkey corresponding to the data elements of the LazyVec +pub const DATA_SUBKEY: &str = "data"; + +/// Using `u64` for vector's indices +pub type Index = u64; + +/// Lazy dynamically-sized vector. +/// +/// This can be used as an alternative to `std::collections::Vec`. In the lazy +/// vector, the elements do not reside in memory but are instead read and +/// written to storage sub-keys of the storage `key` used to construct the +/// vector. +#[derive(Clone, Debug)] +pub struct LazyVec { + key: storage::Key, + phantom: PhantomData, +} + +/// Possible sub-keys of a [`LazyVec`] +#[derive(Debug)] +pub enum SubKey { + /// Length sub-key + Len, + /// Data sub-key, further sub-keyed by its index + Data(Index), +} + +/// Possible sub-keys of a [`LazyVec`], together with their [`validation::Data`] +/// that contains prior and posterior state. +#[derive(Debug)] +pub enum SubKeyWithData { + /// Length sub-key + Len(Data), + /// Data sub-key, further sub-keyed by its index + Data(Index, Data), +} + +/// Possible actions that can modify a [`LazyVec`]. This roughly corresponds to +/// the methods that have `StorageWrite` access. +#[derive(Clone, Debug)] +pub enum Action { + /// Push a value `T` into a [`LazyVec`] + Push(T), + /// Pop a value `T` from a [`LazyVec`] + Pop(T), + /// Update a value `T` at index from pre to post state in a [`LazyVec`] + Update { + /// index at which the value is updated + index: Index, + /// value before the update + pre: T, + /// value after the update + post: T, + }, +} + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum ValidationError { + #[error("Incorrect difference in LazyVec's length")] + InvalidLenDiff, + #[error("An empty LazyVec must be deleted from storage")] + EmptyVecShouldBeDeleted, + #[error("Push at a wrong index. Got {got}, expected {expected}.")] + UnexpectedPushIndex { got: Index, expected: Index }, + #[error("Pop at a wrong index. Got {got}, expected {expected}.")] + UnexpectedPopIndex { got: Index, expected: Index }, + #[error( + "Update (or a combination of pop and push) at a wrong index. Got \ + {got}, expected maximum {max}." 
+ )] + UnexpectedUpdateIndex { got: Index, max: Index }, + #[error("An index has overflown its representation: {0}")] + IndexOverflow(>::Error), + #[error("Unexpected underflow in `{0} - {0}`")] + UnexpectedUnderflow(Index, Index), + #[error("Invalid storage key {0}")] + InvalidSubKey(storage::Key), +} + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum UpdateError { + #[error( + "Invalid index into a LazyVec. Got {index}, but the length is {len}" + )] + InvalidIndex { index: Index, len: u64 }, +} + +/// [`LazyVec`] validation result +pub type ValidationResult = std::result::Result; + +impl LazyCollection for LazyVec +where + T: BorshSerialize + BorshDeserialize + 'static + Debug, +{ + type Action = Action; + type SubKey = SubKey; + type SubKeyWithData = SubKeyWithData; + type Value = T; + + /// Create or use an existing vector with the given storage `key`. + fn open(key: storage::Key) -> Self { + Self { + key, + phantom: PhantomData, + } + } + + /// Check if the given storage key is a valid LazyVec sub-key and if so + /// return which one + fn is_valid_sub_key( + &self, + key: &storage::Key, + ) -> storage_api::Result> { + let suffix = match key.split_prefix(&self.key) { + None => { + // not matching prefix, irrelevant + return Ok(None); + } + Some(None) => { + // no suffix, invalid + return Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(); + } + Some(Some(suffix)) => suffix, + }; + + // Match the suffix against expected sub-keys + match &suffix.segments[..] { + [DbKeySeg::StringSeg(sub)] if sub == LEN_SUBKEY => { + Ok(Some(SubKey::Len)) + } + [DbKeySeg::StringSeg(sub_a), DbKeySeg::StringSeg(sub_b)] + if sub_a == DATA_SUBKEY => + { + if let Ok(index) = storage::KeySeg::parse(sub_b.clone()) { + Ok(Some(SubKey::Data(index))) + } else { + Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result() + } + } + _ => Err(ValidationError::InvalidSubKey(key.clone())) + .into_storage_result(), + } + } + + fn read_sub_key_data( + env: &ENV, + storage_key: &storage::Key, + sub_key: Self::SubKey, + ) -> storage_api::Result> + where + ENV: for<'a> VpEnv<'a>, + { + let change = match sub_key { + SubKey::Len => { + let data = validation::read_data(env, storage_key)?; + data.map(SubKeyWithData::Len) + } + SubKey::Data(index) => { + let data = validation::read_data(env, storage_key)?; + data.map(|data| SubKeyWithData::Data(index, data)) + } + }; + Ok(change) + } + + /// The validation rules for a [`LazyVec`] are: + /// - A difference in the vector's length must correspond to the + /// difference in how many elements were pushed versus how many elements + /// were popped. + /// - An empty vector must be deleted from storage + /// - In addition, we check that indices of any changes are within an + /// expected range (i.e. 
the vectors indices should always be + /// monotonically increasing from zero) + fn validate_changed_sub_keys( + keys: Vec, + ) -> storage_api::Result> { + let mut actions = vec![]; + + // We need to accumulate some values for what's changed + let mut post_gt_pre = false; + let mut len_diff: u64 = 0; + let mut len_pre: u64 = 0; + let mut added = BTreeSet::::default(); + let mut updated = BTreeSet::::default(); + let mut deleted = BTreeSet::::default(); + + for key in keys { + match key { + SubKeyWithData::Len(data) => match data { + Data::Add { post } => { + if post == 0 { + return Err( + ValidationError::EmptyVecShouldBeDeleted, + ) + .into_storage_result(); + } + post_gt_pre = true; + len_diff = post; + } + Data::Update { pre, post } => { + if post == 0 { + return Err( + ValidationError::EmptyVecShouldBeDeleted, + ) + .into_storage_result(); + } + if post > pre { + post_gt_pre = true; + len_diff = post - pre; + } else { + len_diff = pre - post; + } + len_pre = pre; + } + Data::Delete { pre } => { + len_diff = pre; + len_pre = pre; + } + }, + SubKeyWithData::Data(index, data) => match data { + Data::Add { post } => { + actions.push(Action::Push(post)); + added.insert(index); + } + Data::Update { pre, post } => { + actions.push(Action::Update { index, pre, post }); + updated.insert(index); + } + Data::Delete { pre } => { + actions.push(Action::Pop(pre)); + deleted.insert(index); + } + }, + } + } + let added_len: u64 = added + .len() + .try_into() + .map_err(ValidationError::IndexOverflow) + .into_storage_result()?; + let deleted_len: u64 = deleted + .len() + .try_into() + .map_err(ValidationError::IndexOverflow) + .into_storage_result()?; + + if len_diff != 0 + && !(if post_gt_pre { + deleted_len + len_diff == added_len + } else { + added_len + len_diff == deleted_len + }) + { + return Err(ValidationError::InvalidLenDiff).into_storage_result(); + } + + let mut last_added = Option::None; + // Iterate additions in increasing order of indices + for index in added { + if let Some(last_added) = last_added { + // Following additions should be at monotonically increasing + // indices + let expected = last_added + 1; + if expected != index { + return Err(ValidationError::UnexpectedPushIndex { + got: index, + expected, + }) + .into_storage_result(); + } + } else if index != len_pre { + // The first addition must be at the pre length value. + // If something is deleted and a new value is added + // in its place, it will go through `Data::Update` + // instead. 
+ return Err(ValidationError::UnexpectedPushIndex { + got: index, + expected: len_pre, + }) + .into_storage_result(); + } + last_added = Some(index); + } + + let mut last_deleted = Option::None; + // Also iterate deletions in increasing order of indices + for index in deleted { + if let Some(last_added) = last_deleted { + // Following deletions should be at monotonically increasing + // indices + let expected = last_added + 1; + if expected != index { + return Err(ValidationError::UnexpectedPopIndex { + got: index, + expected, + }) + .into_storage_result(); + } + } + last_deleted = Some(index); + } + if let Some(index) = last_deleted { + if len_pre > 0 { + let expected = len_pre - 1; + if index != expected { + // The last deletion must be at the pre length value minus 1 + return Err(ValidationError::UnexpectedPopIndex { + got: index, + expected: len_pre, + }) + .into_storage_result(); + } + } + } + + // And finally iterate updates + for index in updated { + // Update index has to be within the length bounds + let max = len_pre + len_diff; + if index >= max { + return Err(ValidationError::UnexpectedUpdateIndex { + got: index, + max, + }) + .into_storage_result(); + } + } + + Ok(actions) + } +} + +// Generic `LazyVec` methods that require no bounds on values `T` +impl LazyVec { + /// Reads the number of elements in the vector. + #[allow(clippy::len_without_is_empty)] + pub fn len(&self, storage: &S) -> Result + where + S: for<'iter> StorageRead<'iter>, + { + let len = storage.read(&self.get_len_key())?; + Ok(len.unwrap_or_default()) + } + + /// Returns `true` if the vector contains no elements. + pub fn is_empty(&self, storage: &S) -> Result + where + S: for<'iter> StorageRead<'iter>, + { + Ok(self.len(storage)? == 0) + } + + /// Get the prefix of set's elements storage + fn get_data_prefix(&self) -> storage::Key { + self.key.push(&DATA_SUBKEY.to_owned()).unwrap() + } + + /// Get the sub-key of vector's elements storage + fn get_data_key(&self, index: Index) -> storage::Key { + self.get_data_prefix().push(&index).unwrap() + } + + /// Get the sub-key of vector's length storage + fn get_len_key(&self) -> storage::Key { + self.key.push(&LEN_SUBKEY.to_owned()).unwrap() + } +} + +// `LazyVec` methods with borsh encoded values `T` +impl LazyVec +where + T: BorshSerialize + BorshDeserialize + 'static, +{ + /// Appends an element to the back of a collection. + pub fn push(&self, storage: &mut S, val: T) -> Result<()> + where + S: StorageWrite + for<'iter> StorageRead<'iter>, + { + let len = self.len(storage)?; + let data_key = self.get_data_key(len); + storage.write(&data_key, val)?; + storage.write(&self.get_len_key(), len + 1) + } + + /// Removes the last element from a vector and returns it, or `Ok(None)` if + /// it is empty. + /// + /// Note that an empty vector is completely removed from storage. + pub fn pop(&self, storage: &mut S) -> Result> + where + S: StorageWrite + for<'iter> StorageRead<'iter>, + { + let len = self.len(storage)?; + if len == 0 { + Ok(None) + } else { + let index = len - 1; + let data_key = self.get_data_key(index); + if len == 1 { + storage.delete(&self.get_len_key())?; + } else { + storage.write(&self.get_len_key(), index)?; + } + let popped_val = storage.read(&data_key)?; + storage.delete(&data_key)?; + Ok(popped_val) + } + } + + /// Update an element at the given index. + /// + /// The index must be smaller than the length of the vector, otherwise this + /// will fail with `UpdateError::InvalidIndex`. 
+ pub fn update(&self, storage: &mut S, index: Index, val: T) -> Result<()> + where + S: StorageWrite + for<'iter> StorageRead<'iter>, + { + let len = self.len(storage)?; + if index >= len { + return Err(UpdateError::InvalidIndex { index, len }) + .into_storage_result(); + } + let data_key = self.get_data_key(index); + storage.write(&data_key, val) + } + + /// Read an element at the index or `Ok(None)` if out of bounds. + pub fn get(&self, storage: &S, index: Index) -> Result> + where + S: for<'iter> StorageRead<'iter>, + { + storage.read(&self.get_data_key(index)) + } + + /// An iterator visiting all elements. The iterator element type is + /// `Result`, because iterator's call to `next` may fail with e.g. out of + /// gas or data decoding error. + /// + /// Note that this function shouldn't be used in transactions and VPs code + /// on unbounded sets to avoid gas usage increasing with the length of the + /// set. + pub fn iter<'iter>( + &self, + storage: &'iter impl StorageRead<'iter>, + ) -> Result> + 'iter> { + let iter = storage_api::iter_prefix(storage, &self.get_data_prefix())?; + Ok(iter.map(|key_val_res| { + let (_key, val) = key_val_res?; + Ok(val) + })) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::ledger::storage::testing::TestStorage; + + #[test] + fn test_lazy_vec_basics() -> storage_api::Result<()> { + let mut storage = TestStorage::default(); + + let key = storage::Key::parse("test").unwrap(); + let lazy_vec = LazyVec::::open(key); + + // The vec should be empty at first + assert!(lazy_vec.is_empty(&storage)?); + assert!(lazy_vec.len(&storage)? == 0); + assert!(lazy_vec.iter(&storage)?.next().is_none()); + assert!(lazy_vec.pop(&mut storage)?.is_none()); + assert!(lazy_vec.get(&storage, 0)?.is_none()); + assert!(lazy_vec.get(&storage, 1)?.is_none()); + + // Push a new value and check that it's added + lazy_vec.push(&mut storage, 15_u32)?; + assert!(!lazy_vec.is_empty(&storage)?); + assert!(lazy_vec.len(&storage)? == 1); + assert_eq!(lazy_vec.iter(&storage)?.next().unwrap()?, 15_u32); + assert_eq!(lazy_vec.get(&storage, 0)?.unwrap(), 15_u32); + assert!(lazy_vec.get(&storage, 1)?.is_none()); + + // Pop the last value and check that the vec is empty again + let popped = lazy_vec.pop(&mut storage)?.unwrap(); + assert_eq!(popped, 15_u32); + assert!(lazy_vec.is_empty(&storage)?); + assert!(lazy_vec.len(&storage)? == 0); + assert!(lazy_vec.iter(&storage)?.next().is_none()); + assert!(lazy_vec.pop(&mut storage)?.is_none()); + assert!(lazy_vec.get(&storage, 0)?.is_none()); + assert!(lazy_vec.get(&storage, 1)?.is_none()); + + Ok(()) + } +} diff --git a/shared/src/ledger/storage_api/collections/mod.rs b/shared/src/ledger/storage_api/collections/mod.rs new file mode 100644 index 0000000000..688b76bd49 --- /dev/null +++ b/shared/src/ledger/storage_api/collections/mod.rs @@ -0,0 +1,143 @@ +//! Lazy data structures for storage access where elements are not all loaded +//! into memory. This serves to minimize gas costs, avoid unbounded iteration +//! in some cases, and ease the validation of storage changes in VPs. +//! +//! Rather than finding the diff of the state before and after (which requires +//! iteration over both of the states that also have to be decoded), VPs will +//! just receive the storage sub-keys that have experienced changes without +//! having to check any of the unchanged elements. 
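For orientation, a minimal sketch (not part of this diff; crate paths and the key literal are assumed) of how transaction code can use the `LazyVec` added above, with elements nested under the vector's `data` sub-key and the length under `len`:

```rust
use namada::ledger::storage_api::collections::{LazyCollection, LazyVec}; // assumed paths
use namada::ledger::storage_api::{Result, StorageRead, StorageWrite};
use namada::types::storage;

fn record_value<S>(storage: &mut S, val: u64) -> Result<()>
where
    S: StorageWrite + for<'iter> StorageRead<'iter>,
{
    // The key literal is illustrative; the vector nests its elements under
    // "<prefix>/data/<index>" and keeps its length at "<prefix>/len".
    let log = LazyVec::<u64>::open(storage::Key::parse("queue").unwrap());
    // Writes the element at index `len` and bumps the stored length.
    log.push(storage, val)?;
    Ok(())
}
```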
+ +use std::fmt::Debug; + +use borsh::BorshDeserialize; +use derivative::Derivative; +use thiserror::Error; + +pub mod lazy_map; +pub mod lazy_vec; + +pub use lazy_map::LazyMap; +pub use lazy_vec::LazyVec; + +use crate::ledger::storage_api; +use crate::ledger::vp_env::VpEnv; +use crate::types::storage; + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum ReadError { + #[error("A storage key was unexpectedly empty")] + UnexpectedlyEmptyStorageKey, +} + +/// Simple lazy collection with borsh deserializable elements +#[derive(Debug)] +pub struct Simple; + +/// Lazy collection with a nested lazy collection +#[derive(Debug)] +pub struct Nested; + +/// A lazy collection of storage values is a handler with some storage prefix +/// that is given to its `fn new()`. The values are typically nested under this +/// prefix and they can be changed individually (e.g. without reading in the +/// whole collection) and their changes directly indicated to the validity +/// predicates, which do not need to iterate the whole collection pre/post to +/// find diffs. +/// +/// An empty collection must be deleted from storage. +pub trait LazyCollection { + /// Actions on the collection determined from changed storage keys by + /// `Self::validate` + type Action; + + /// Possible sub-keys in the collection + type SubKey: Debug; + + /// Possible sub-keys together with the data read from storage + type SubKeyWithData: Debug; + + /// A type of a value in the inner-most collection + type Value: BorshDeserialize; + + /// Create or use an existing vector with the given storage `key`. + fn open(key: storage::Key) -> Self; + + /// Check if the given storage key is a valid LazyVec sub-key and if so + /// return which one. Returns: + /// - `Ok(Some(_))` if it's a valid sub-key + /// - `Ok(None)` if it's not a sub-key + /// - `Err(_)` if it's an invalid sub-key + fn is_valid_sub_key( + &self, + key: &storage::Key, + ) -> storage_api::Result>; + + /// Try to read and decode the data for each change storage key in prior and + /// posterior state. If there is no value in neither prior or posterior + /// state (which is a possible state when transaction e.g. writes and then + /// deletes one storage key, but it is treated as a no-op as it doesn't + /// affect result of validation), returns `Ok(None)`. + fn read_sub_key_data( + env: &ENV, + storage_key: &storage::Key, + sub_key: Self::SubKey, + ) -> storage_api::Result> + where + ENV: for<'a> VpEnv<'a>; + + /// Validate changed sub-keys associated with their data and return back + /// a vector of `Self::Action`s, if the changes are valid + fn validate_changed_sub_keys( + keys: Vec, + ) -> storage_api::Result>; + + /// Accumulate storage changes inside a `ValidationBuilder`. This is + /// typically done by the validity predicate while looping through the + /// changed keys. If the resulting `builder` is not `None`, one must + /// call `fn build()` on it to get the validation result. + /// This function will return `Ok(true)` if the storage key is a valid + /// sub-key of this collection, `Ok(false)` if the storage key doesn't match + /// the prefix of this collection, or error if the prefix matches this + /// collection, but the key itself is not recognized. + fn accumulate( + &self, + env: &ENV, + builder: &mut Option>, + key_changed: &storage::Key, + ) -> storage_api::Result + where + ENV: for<'a> VpEnv<'a>, + { + if let Some(sub) = self.is_valid_sub_key(key_changed)? 
{ + let change = Self::read_sub_key_data(env, key_changed, sub)?; + if let Some(change) = change { + let builder = + builder.get_or_insert(ValidationBuilder::default()); + builder.changes.push(change); + } + return Ok(true); + } + Ok(false) + } + + /// Execute validation on the validation builder, to be called when + /// `accumulate` instantiates the builder to `Some(_)`, after all the + /// changes storage keys have been processed. + fn validate( + builder: ValidationBuilder, + ) -> storage_api::Result> { + Self::validate_changed_sub_keys(builder.changes) + } +} + +/// Validation builder from storage changes. The changes can +/// be accumulated with `LazyCollection::accumulate()` and then turned into a +/// list of valid actions on the collection with `LazyCollection::validate()`. +#[derive(Debug, Derivative)] +// https://mcarton.github.io/rust-derivative/latest/Default.html#custom-bound +#[derivative(Default(bound = ""))] +pub struct ValidationBuilder { + /// The accumulator of found changes under the vector + pub changes: Vec, +} diff --git a/shared/src/ledger/storage_api/error.rs b/shared/src/ledger/storage_api/error.rs new file mode 100644 index 0000000000..f99539bc87 --- /dev/null +++ b/shared/src/ledger/storage_api/error.rs @@ -0,0 +1,91 @@ +//! Storage API error type, extensible with custom user errors and static string +//! messages. + +use thiserror::Error; + +#[allow(missing_docs)] +#[derive(Error, Debug)] +pub enum Error { + #[error("{0}")] + SimpleMessage(&'static str), + #[error("{0}")] + Custom(CustomError), + #[error("{0}: {1}")] + CustomWithMessage(&'static str, CustomError), +} + +/// Result of a storage API call. +pub type Result = std::result::Result; + +/// Result extension to easily wrap custom errors into [`enum@Error`]. +// This is separate from `ResultExt`, because the implementation requires +// different bounds for `T`. +pub trait ResultExt { + /// Convert a [`std::result::Result`] into storage_api [`Result`]. + fn into_storage_result(self) -> Result; + + /// Add a static message to a possible error in [`Result`]. + fn wrap_err(self, msg: &'static str) -> Result; +} + +impl ResultExt for std::result::Result +where + E: std::error::Error + Send + Sync + 'static, +{ + fn into_storage_result(self) -> Result { + self.map_err(Error::new) + } + + fn wrap_err(self, msg: &'static str) -> Result { + self.map_err(|err| Error::wrap(msg, err)) + } +} + +impl Error { + /// Create an [`enum@Error`] from another [`std::error::Error`]. + pub fn new(error: E) -> Self + where + E: Into>, + { + Self::Custom(CustomError(error.into())) + } + + /// Create an [`enum@Error`] from a static message. + #[inline] + pub const fn new_const(msg: &'static str) -> Self { + Self::SimpleMessage(msg) + } + + /// Wrap another [`std::error::Error`] with a static message. + pub fn wrap(msg: &'static str, error: E) -> Self + where + E: Into>, + { + Self::CustomWithMessage(msg, CustomError(error.into())) + } +} + +/// A custom error +#[derive(Debug)] +pub struct CustomError(pub Box); + +impl std::fmt::Display for CustomError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +/// An extension to [`Option`] to allow turning `None` case to an Error from a +/// static string (handy for WASM). +pub trait OptionExt { + /// Transforms the [`Option`] into a [`Result`], mapping + /// [`Some(v)`] to [`Ok(v)`] and [`None`] to the given static error + /// message. 
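As a hedged illustration of the error helpers introduced in `storage_api/error.rs` above (crate path and the function itself are hypothetical, not part of the diff):

```rust
use namada::ledger::storage_api::{OptionExt, Result, ResultExt}; // assumed path

fn parse_config(bytes: &[u8]) -> Result<u64> {
    // `wrap_err` attaches a static message while converting any `std::error::Error`.
    let s = std::str::from_utf8(bytes).wrap_err("config is not valid UTF-8")?;
    // `ok_or_err_msg` turns a `None` into an error built from a static message.
    s.parse::<u64>().ok().ok_or_err_msg("config is not a number")
}
```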
+ fn ok_or_err_msg(self, msg: &'static str) -> Result; +} + +impl OptionExt for Option { + fn ok_or_err_msg(self, msg: &'static str) -> Result { + self.ok_or_else(|| Error::new_const(msg)) + } +} diff --git a/shared/src/ledger/storage_api/mod.rs b/shared/src/ledger/storage_api/mod.rs new file mode 100644 index 0000000000..b806f35801 --- /dev/null +++ b/shared/src/ledger/storage_api/mod.rs @@ -0,0 +1,257 @@ +//! The common storage read trait is implemented in the storage, client RPC, tx +//! and VPs (both native and WASM). + +pub mod collections; +mod error; +pub mod validation; + +use borsh::{BorshDeserialize, BorshSerialize}; +pub use error::{CustomError, Error, OptionExt, Result, ResultExt}; + +use crate::types::storage::{self, BlockHash, BlockHeight, Epoch}; + +/// Common storage read interface +/// +/// If you're using this trait and having compiler complaining about needing an +/// explicit lifetime parameter, simply use trait bounds with the following +/// syntax: +/// +/// ```rust,ignore +/// where +/// S: for<'iter> StorageRead<'iter> +/// ``` +/// +/// If you want to know why this is needed, see the to-do task below. The +/// syntax for this relies on higher-rank lifetimes, see e.g. +/// . +/// +/// TODO: once GATs are stabilized, we should be able to remove the `'iter` +/// lifetime param that is currently the only way to make the prefix iterator +/// typecheck in the `>::PrefixIter` associated type used in +/// `impl StorageRead for Storage` (shared/src/ledger/storage/mod.rs). +/// See +pub trait StorageRead<'iter> { + /// Storage read prefix iterator + type PrefixIter; + + /// Storage read Borsh encoded value. It will try to read from the storage + /// and decode it if found. + fn read( + &self, + key: &storage::Key, + ) -> Result> { + let bytes = self.read_bytes(key)?; + match bytes { + Some(bytes) => { + let val = T::try_from_slice(&bytes).into_storage_result()?; + Ok(Some(val)) + } + None => Ok(None), + } + } + + /// Storage read raw bytes. It will try to read from the storage. + fn read_bytes(&self, key: &storage::Key) -> Result>>; + + /// Storage `has_key` in. It will try to read from the storage. + fn has_key(&self, key: &storage::Key) -> Result; + + /// Storage prefix iterator ordered by the storage keys. It will try to get + /// an iterator from the storage. + /// + /// For a more user-friendly iterator API, use [`fn@iter_prefix`] or + /// [`fn@iter_prefix_bytes`] instead. + fn iter_prefix( + &'iter self, + prefix: &storage::Key, + ) -> Result; + + /// Storage prefix iterator in reverse order of the storage keys. It will + /// try to get an iterator from the storage. + /// + /// For a more user-friendly iterator API, use [`fn@rev_iter_prefix`] or + /// [`fn@rev_iter_prefix_bytes`] instead. + fn rev_iter_prefix( + &'iter self, + prefix: &storage::Key, + ) -> Result; + + /// Storage prefix iterator. It will try to read from the storage. + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>>; + + /// Getting the chain ID. + fn get_chain_id(&self) -> Result; + + /// Getting the block height. The height is that of the block to which the + /// current transaction is being applied. + fn get_block_height(&self) -> Result; + + /// Getting the block hash. The height is that of the block to which the + /// current transaction is being applied. + fn get_block_hash(&self) -> Result; + + /// Getting the block epoch. The epoch is that of the block to which the + /// current transaction is being applied. 
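A small sketch of the higher-rank trait bound recommended in the `StorageRead` docs above; the helper name and crate paths are assumptions, not part of the diff:

```rust
use borsh::BorshDeserialize;
use namada::ledger::storage_api::{Result, StorageRead}; // assumed path
use namada::types::storage;

fn read_or_default<S, T>(storage: &S, key: &storage::Key) -> Result<T>
where
    S: for<'iter> StorageRead<'iter>,
    T: BorshDeserialize + Default,
{
    // `read` fetches the raw bytes (if any) and Borsh-decodes them.
    Ok(storage.read(key)?.unwrap_or_default())
}
```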
+ fn get_block_epoch(&self) -> Result; +} + +/// Common storage write interface +pub trait StorageWrite { + /// Write a value to be encoded with Borsh at the given key to storage. + fn write( + &mut self, + key: &storage::Key, + val: T, + ) -> Result<()> { + let bytes = val.try_to_vec().into_storage_result()?; + self.write_bytes(key, bytes) + } + + /// Write a value as bytes at the given key to storage. + fn write_bytes( + &mut self, + key: &storage::Key, + val: impl AsRef<[u8]>, + ) -> Result<()>; + + /// Delete a value at the given key from storage. + fn delete(&mut self, key: &storage::Key) -> Result<()>; +} + +/// Iterate items matching the given prefix, ordered by the storage keys. +pub fn iter_prefix_bytes<'a>( + storage: &'a impl StorageRead<'a>, + prefix: &crate::types::storage::Key, +) -> Result)>> + 'a> { + let iter = storage.iter_prefix(prefix)?; + let iter = itertools::unfold(iter, |iter| { + match storage.iter_next(iter) { + Ok(Some((key, val))) => { + let key = match storage::Key::parse(key).into_storage_result() { + Ok(key) => key, + Err(err) => { + // Propagate key encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + Some(Ok((key, val))) + } + Ok(None) => None, + Err(err) => { + // Propagate `iter_next` errors into Iterator's Item + Some(Err(err)) + } + } + }); + Ok(iter) +} + +/// Iterate Borsh encoded items matching the given prefix, ordered by the +/// storage keys. +pub fn iter_prefix<'a, T>( + storage: &'a impl StorageRead<'a>, + prefix: &crate::types::storage::Key, +) -> Result> + 'a> +where + T: BorshDeserialize, +{ + let iter = storage.iter_prefix(prefix)?; + let iter = itertools::unfold(iter, |iter| { + match storage.iter_next(iter) { + Ok(Some((key, val))) => { + let key = match storage::Key::parse(key).into_storage_result() { + Ok(key) => key, + Err(err) => { + // Propagate key encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + let val = match T::try_from_slice(&val).into_storage_result() { + Ok(val) => val, + Err(err) => { + // Propagate val encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + Some(Ok((key, val))) + } + Ok(None) => None, + Err(err) => { + // Propagate `iter_next` errors into Iterator's Item + Some(Err(err)) + } + } + }); + Ok(iter) +} + +/// Iterate items matching the given prefix, reverse ordered by the storage +/// keys. +pub fn rev_iter_prefix_bytes<'a>( + storage: &'a impl StorageRead<'a>, + prefix: &crate::types::storage::Key, +) -> Result)>> + 'a> { + let iter = storage.rev_iter_prefix(prefix)?; + let iter = itertools::unfold(iter, |iter| { + match storage.iter_next(iter) { + Ok(Some((key, val))) => { + let key = match storage::Key::parse(key).into_storage_result() { + Ok(key) => key, + Err(err) => { + // Propagate key encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + Some(Ok((key, val))) + } + Ok(None) => None, + Err(err) => { + // Propagate `iter_next` errors into Iterator's Item + Some(Err(err)) + } + } + }); + Ok(iter) +} + +/// Iterate Borsh encoded items matching the given prefix, reverse ordered by +/// the storage keys. 
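Illustrative usage of the new `iter_prefix` helper (crate paths and the `u64` value type are assumptions), showing that each yielded item is itself a `Result`:

```rust
use namada::ledger::storage_api::{self, Result, StorageRead}; // assumed path
use namada::types::storage;

fn sum_values<'a>(
    storage: &'a impl StorageRead<'a>,
    prefix: &storage::Key,
) -> Result<u64> {
    let mut total = 0u64;
    for item in storage_api::iter_prefix(storage, prefix)? {
        // Every item is a `Result`: decoding or the underlying `iter_next`
        // call may fail mid-iteration and is propagated here.
        let (_key, value): (storage::Key, u64) = item?;
        total += value;
    }
    Ok(total)
}
```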
+pub fn rev_iter_prefix<'a, T>( + storage: &'a impl StorageRead<'a>, + prefix: &crate::types::storage::Key, +) -> Result> + 'a> +where + T: BorshDeserialize, +{ + let iter = storage.rev_iter_prefix(prefix)?; + let iter = itertools::unfold(iter, |iter| { + match storage.iter_next(iter) { + Ok(Some((key, val))) => { + let key = match storage::Key::parse(key).into_storage_result() { + Ok(key) => key, + Err(err) => { + // Propagate key encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + let val = match T::try_from_slice(&val).into_storage_result() { + Ok(val) => val, + Err(err) => { + // Propagate val encoding errors into Iterator's Item + return Some(Err(err)); + } + }; + Some(Ok((key, val))) + } + Ok(None) => None, + Err(err) => { + // Propagate `iter_next` errors into Iterator's Item + Some(Err(err)) + } + } + }); + Ok(iter) +} diff --git a/shared/src/ledger/storage_api/validation/mod.rs b/shared/src/ledger/storage_api/validation/mod.rs new file mode 100644 index 0000000000..ca0e779a75 --- /dev/null +++ b/shared/src/ledger/storage_api/validation/mod.rs @@ -0,0 +1,54 @@ +//! Storage change validation helpers + +use std::fmt::Debug; + +use borsh::BorshDeserialize; + +use crate::ledger::storage_api; +use crate::ledger::vp_env::VpEnv; +use crate::types::storage; + +/// Data update with prior and posterior state. +#[derive(Clone, Debug)] +pub enum Data { + /// Newly added value + Add { + /// Posterior state + post: T, + }, + /// Updated value prior and posterior state + Update { + /// Prior state + pre: T, + /// Posterior state + post: T, + }, + /// Deleted value + Delete { + /// Prior state + pre: T, + }, +} + +/// Read the prior and posterior state for the given key. +pub fn read_data( + env: &ENV, + key: &storage::Key, +) -> Result>, storage_api::Error> +where + T: BorshDeserialize, + ENV: for<'a> VpEnv<'a>, +{ + let pre = env.read_pre(key)?; + let post = env.read_post(key)?; + Ok(match (pre, post) { + (None, None) => { + // If the key was inserted and then deleted in the same tx, we don't + // need to validate it as it's not visible to any VPs + None + } + (None, Some(post)) => Some(Data::Add { post }), + (Some(pre), None) => Some(Data::Delete { pre }), + (Some(pre), Some(post)) => Some(Data::Update { pre, post }), + }) +} diff --git a/shared/src/ledger/treasury/mod.rs b/shared/src/ledger/treasury/mod.rs deleted file mode 100644 index 35b4ce5022..0000000000 --- a/shared/src/ledger/treasury/mod.rs +++ /dev/null @@ -1,181 +0,0 @@ -//! 
Treasury VP - -use std::collections::BTreeSet; -/// treasury parameters -pub mod parameters; -/// treasury storage -pub mod storage; - -use borsh::BorshDeserialize; -use thiserror::Error; - -use self::storage as treasury_storage; -use super::governance::vp::is_proposal_accepted; -use crate::ledger::native_vp::{self, Ctx, NativeVp}; -use crate::ledger::storage::traits::StorageHasher; -use crate::ledger::storage::{self as ledger_storage}; -use crate::types::address::{xan as nam, Address, InternalAddress}; -use crate::types::storage::Key; -use crate::types::token; -use crate::vm::WasmCacheAccess; - -/// Internal treasury address -pub const ADDRESS: Address = Address::Internal(InternalAddress::Treasury); - -#[allow(missing_docs)] -#[derive(Error, Debug)] -pub enum Error { - #[error("Native VP error: {0}")] - NativeVpError(native_vp::Error), -} - -/// Treasury functions result -pub type Result = std::result::Result; - -/// Treasury VP -pub struct TreasuryVp<'a, DB, H, CA> -where - DB: ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>, - H: StorageHasher, - CA: WasmCacheAccess, -{ - /// Context to interact with the host structures. - pub ctx: Ctx<'a, DB, H, CA>, -} - -impl<'a, DB, H, CA> NativeVp for TreasuryVp<'a, DB, H, CA> -where - DB: 'static + ledger_storage::DB + for<'iter> ledger_storage::DBIter<'iter>, - H: 'static + StorageHasher, - CA: 'static + WasmCacheAccess, -{ - type Error = Error; - - const ADDR: InternalAddress = InternalAddress::Treasury; - - fn validate_tx( - &self, - tx_data: &[u8], - keys_changed: &BTreeSet, - _verifiers: &BTreeSet
, - ) -> Result { - let result = keys_changed.iter().all(|key| { - let key_type: KeyType = key.into(); - match key_type { - KeyType::PARAMETER => { - let proposal_id = u64::try_from_slice(tx_data).ok(); - match proposal_id { - Some(id) => is_proposal_accepted(&self.ctx, id), - _ => false, - } - } - KeyType::BALANCE(addr) => { - let proposal_id = u64::try_from_slice(tx_data).ok(); - if let Some(id) = proposal_id { - if !is_proposal_accepted(&self.ctx, id) { - return false; - } - } else { - return false; - }; - let is_max_funds_transfer_key = - treasury_storage::get_max_transferable_fund_key(); - let balance_key = token::balance_key(&nam(), &ADDRESS); - let max_transfer_amount = - self.ctx.read_pre(&is_max_funds_transfer_key); - let pre_balance = self.ctx.read_pre(&balance_key); - let post_balance = self.ctx.read_post(&balance_key); - if addr.ne(&ADDRESS) { - return true; - } - match (max_transfer_amount, pre_balance, post_balance) { - ( - Ok(max_transfer_amount), - Ok(pre_balance), - Ok(post_balance), - ) => { - match ( - max_transfer_amount, - pre_balance, - post_balance, - ) { - ( - Some(max_transfer_amount), - Some(pre_balance), - Some(post_balance), - ) => { - let max_transfer_amount = - token::Amount::try_from_slice( - &max_transfer_amount[..], - ) - .ok(); - let pre_balance = - token::Amount::try_from_slice( - &pre_balance[..], - ) - .ok(); - let post_balance = - token::Amount::try_from_slice( - &post_balance[..], - ) - .ok(); - match ( - max_transfer_amount, - pre_balance, - post_balance, - ) { - ( - Some(max_transfer_amount), - Some(pre_balance), - Some(post_balance), - ) => { - post_balance > pre_balance - || (pre_balance - post_balance - <= max_transfer_amount) - } - _ => false, - } - } - _ => false, - } - } - _ => false, - } - } - KeyType::UNKNOWN_TREASURY => false, - KeyType::UNKNOWN => true, - } - }); - Ok(result) - } -} - -#[allow(clippy::upper_case_acronyms)] -enum KeyType { - #[allow(clippy::upper_case_acronyms)] - BALANCE(Address), - #[allow(clippy::upper_case_acronyms)] - PARAMETER, - #[allow(clippy::upper_case_acronyms)] - #[allow(non_camel_case_types)] - UNKNOWN_TREASURY, - #[allow(clippy::upper_case_acronyms)] - UNKNOWN, -} - -impl From<&Key> for KeyType { - fn from(value: &Key) -> Self { - if treasury_storage::is_parameter_key(value) { - KeyType::PARAMETER - } else if treasury_storage::is_treasury_key(value) { - KeyType::UNKNOWN_TREASURY - } else if token::is_any_token_balance_key(value).is_some() { - match token::is_balance_key(&nam(), value) { - Some(addr) => KeyType::BALANCE(addr.clone()), - None => KeyType::UNKNOWN, - } - } else { - KeyType::UNKNOWN - } - } -} diff --git a/shared/src/ledger/treasury/parameters.rs b/shared/src/ledger/treasury/parameters.rs deleted file mode 100644 index 11d3b6db5a..0000000000 --- a/shared/src/ledger/treasury/parameters.rs +++ /dev/null @@ -1,47 +0,0 @@ -use borsh::{BorshDeserialize, BorshSerialize}; - -use super::storage as treasury_storage; -use crate::ledger::storage::types::encode; -use crate::ledger::storage::{self, Storage}; -use crate::types::token::Amount; - -#[derive( - Clone, - Debug, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - BorshSerialize, - BorshDeserialize, -)] -/// Governance parameter structure -pub struct TreasuryParams { - /// Maximum amount of token that can be moved in a single transfer - pub max_proposal_fund_transfer: u64, -} - -impl Default for TreasuryParams { - fn default() -> Self { - Self { - max_proposal_fund_transfer: 10_000, - } - } -} - -impl TreasuryParams { - /// Initialize treasury parameters 
into storage - pub fn init_storage(&self, storage: &mut Storage) - where - DB: storage::DB + for<'iter> storage::DBIter<'iter>, - H: storage::traits::StorageHasher, - { - let max_proposal_fund_transfer_key = - treasury_storage::get_max_transferable_fund_key(); - let amount = Amount::whole(self.max_proposal_fund_transfer); - storage - .write(&max_proposal_fund_transfer_key, encode(&amount)) - .unwrap(); - } -} diff --git a/shared/src/ledger/treasury/storage.rs b/shared/src/ledger/treasury/storage.rs deleted file mode 100644 index e4a8733240..0000000000 --- a/shared/src/ledger/treasury/storage.rs +++ /dev/null @@ -1,33 +0,0 @@ -use super::ADDRESS; -use crate::types::storage::{DbKeySeg, Key, KeySeg}; - -const MAX_TRANSFERABLE_FUND_KEY: &str = "max_fund"; - -/// Check if a key is a treasury key -pub fn is_treasury_key(key: &Key) -> bool { - matches!(&key.segments[0], DbKeySeg::AddressSeg(addr) if addr == &ADDRESS) -} - -/// Check if key is max funds transfer key -pub fn is_max_funds_transfer_key(key: &Key) -> bool { - match &key.segments[..] { - [DbKeySeg::AddressSeg(addr), DbKeySeg::StringSeg(max_fund)] - if addr == &ADDRESS && max_fund == MAX_TRANSFERABLE_FUND_KEY => - { - true - } - _ => false, - } -} - -/// Check if key is any parameter key -pub fn is_parameter_key(key: &Key) -> bool { - is_max_funds_transfer_key(key) -} - -/// Get key of max funds transfer parameter -pub fn get_max_transferable_fund_key() -> Key { - Key::from(ADDRESS.to_db_key()) - .push(&MAX_TRANSFERABLE_FUND_KEY.to_owned()) - .expect("Cannot obtain a storage key") -} diff --git a/shared/src/ledger/tx_env.rs b/shared/src/ledger/tx_env.rs new file mode 100644 index 0000000000..7672ac6505 --- /dev/null +++ b/shared/src/ledger/tx_env.rs @@ -0,0 +1,63 @@ +//! Transaction environment contains functions that can be called from +//! inside a tx. + +use borsh::BorshSerialize; + +use crate::ledger::storage_api::{self, StorageRead, StorageWrite}; +use crate::types::address::Address; +use crate::types::ibc::IbcEvent; +use crate::types::storage; +use crate::types::time::Rfc3339String; + +/// Transaction host functions +pub trait TxEnv<'iter>: StorageRead<'iter> + StorageWrite { + /// Write a temporary value to be encoded with Borsh at the given key to + /// storage. + fn write_temp( + &mut self, + key: &storage::Key, + val: T, + ) -> Result<(), storage_api::Error>; + + /// Write a temporary value as bytes at the given key to storage. + fn write_bytes_temp( + &mut self, + key: &storage::Key, + val: impl AsRef<[u8]>, + ) -> Result<(), storage_api::Error>; + + /// Insert a verifier address. This address must exist on chain, otherwise + /// the transaction will be rejected. + /// + /// Validity predicates of each verifier addresses inserted in the + /// transaction will validate the transaction and will receive all the + /// changed storage keys and initialized accounts in their inputs. + fn insert_verifier( + &mut self, + addr: &Address, + ) -> Result<(), storage_api::Error>; + + /// Initialize a new account generates a new established address and + /// writes the given code as its validity predicate into the storage. + fn init_account( + &mut self, + code: impl AsRef<[u8]>, + ) -> Result; + + /// Update a validity predicate + fn update_validity_predicate( + &mut self, + addr: &Address, + code: impl AsRef<[u8]>, + ) -> Result<(), storage_api::Error>; + + /// Emit an IBC event. There can be only one event per transaction. On + /// multiple calls, only the last emitted event will be used. 
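A sketch, under assumed crate paths, of how transaction code might combine the `TxEnv` methods above; the key literal and the VP code are placeholders:

```rust
use namada::ledger::storage_api::Result; // assumed path
use namada::ledger::tx_env::TxEnv;
use namada::types::storage;

fn setup_account<'a, TX>(tx: &mut TX, vp_code: Vec<u8>) -> Result<()>
where
    TX: TxEnv<'a>,
{
    // Creates a fresh established address whose validity predicate is `vp_code`.
    let owner = tx.init_account(&vp_code)?;
    // Plain writes come from the inherited `StorageWrite` super-trait.
    let counter_key = storage::Key::parse("counter").unwrap(); // illustrative key
    tx.write(&counter_key, 0u64)?;
    // The new account's VP will then be asked to validate this transaction.
    tx.insert_verifier(&owner)?;
    Ok(())
}
```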
+ fn emit_ibc_event( + &mut self, + event: &IbcEvent, + ) -> Result<(), storage_api::Error>; + + /// Get time of the current block header as rfc 3339 string + fn get_block_time(&self) -> Result; +} diff --git a/shared/src/ledger/vp_env.rs b/shared/src/ledger/vp_env.rs index aafd4b135a..32dd93232d 100644 --- a/shared/src/ledger/vp_env.rs +++ b/shared/src/ledger/vp_env.rs @@ -3,9 +3,11 @@ use std::num::TryFromIntError; +use borsh::BorshDeserialize; use thiserror::Error; use super::gas::MIN_STORAGE_GAS; +use super::storage_api::{self, StorageRead}; use crate::ledger::gas; use crate::ledger::gas::VpGasMeter; use crate::ledger::storage::traits::StorageHasher; @@ -13,8 +15,170 @@ use crate::ledger::storage::write_log::WriteLog; use crate::ledger::storage::{self, write_log, Storage}; use crate::proto::Tx; use crate::types::hash::Hash; +use crate::types::key::common; use crate::types::storage::{BlockHash, BlockHeight, Epoch, Key}; +/// Validity predicate's environment is available for native VPs and WASM VPs +pub trait VpEnv<'view> { + /// Storage read prefix iterator + type PrefixIter; + + /// Type to read storage state before the transaction execution + type Pre: StorageRead<'view, PrefixIter = Self::PrefixIter>; + + /// Type to read storage state after the transaction execution + type Post: StorageRead<'view, PrefixIter = Self::PrefixIter>; + + /// Read storage state before the transaction execution + fn pre(&'view self) -> Self::Pre; + + /// Read storage state after the transaction execution + fn post(&'view self) -> Self::Post; + + /// Storage read temporary state Borsh encoded value (after tx execution). + /// It will try to read from only the write log and then decode it if + /// found. + fn read_temp( + &self, + key: &Key, + ) -> Result, storage_api::Error>; + + /// Storage read temporary state raw bytes (after tx execution). It will try + /// to read from only the write log. + fn read_bytes_temp( + &self, + key: &Key, + ) -> Result>, storage_api::Error>; + + /// Getting the chain ID. + fn get_chain_id(&'view self) -> Result; + + /// Getting the block height. The height is that of the block to which the + /// current transaction is being applied. + fn get_block_height(&'view self) + -> Result; + + /// Getting the block hash. The height is that of the block to which the + /// current transaction is being applied. + fn get_block_hash(&'view self) -> Result; + + /// Getting the block epoch. The epoch is that of the block to which the + /// current transaction is being applied. + fn get_block_epoch(&'view self) -> Result; + + /// Storage prefix iterator, ordered by storage keys. It will try to get an + /// iterator from the storage. + fn iter_prefix( + &'view self, + prefix: &Key, + ) -> Result; + + /// Storage prefix iterator, reverse ordered by storage keys. It will try to + /// get an iterator from the storage. + fn rev_iter_prefix( + &self, + prefix: &Key, + ) -> Result; + + /// Evaluate a validity predicate with given data. The address, changed + /// storage keys and verifiers will have the same values as the input to + /// caller's validity predicate. + /// + /// If the execution fails for whatever reason, this will return `false`. + /// Otherwise returns the result of evaluation. + fn eval( + &self, + vp_code: Vec, + input_data: Vec, + ) -> Result; + + /// Verify a transaction signature. The signature is expected to have been + /// produced on the encoded transaction [`crate::proto::Tx`] + /// using [`crate::proto::Tx::sign`]. 
+ fn verify_tx_signature( + &self, + pk: &common::PublicKey, + sig: &common::Signature, + ) -> Result; + + /// Get a tx hash + fn get_tx_code_hash(&self) -> Result; + + // ---- Methods below have default implementation via `pre/post` ---- + + /// Storage read prior state Borsh encoded value (before tx execution). It + /// will try to read from the storage and decode it if found. + fn read_pre( + &'view self, + key: &Key, + ) -> Result, storage_api::Error> { + self.pre().read(key) + } + + /// Storage read prior state raw bytes (before tx execution). It + /// will try to read from the storage. + fn read_bytes_pre( + &'view self, + key: &Key, + ) -> Result>, storage_api::Error> { + self.pre().read_bytes(key) + } + + /// Storage read posterior state Borsh encoded value (after tx execution). + /// It will try to read from the write log first and if no entry found + /// then from the storage and then decode it if found. + fn read_post( + &'view self, + key: &Key, + ) -> Result, storage_api::Error> { + self.post().read(key) + } + + /// Storage read posterior state raw bytes (after tx execution). It will try + /// to read from the write log first and if no entry found then from the + /// storage. + fn read_bytes_post( + &'view self, + key: &Key, + ) -> Result>, storage_api::Error> { + self.post().read_bytes(key) + } + + /// Storage `has_key` in prior state (before tx execution). It will try to + /// read from the storage. + fn has_key_pre(&'view self, key: &Key) -> Result { + self.pre().has_key(key) + } + + /// Storage `has_key` in posterior state (after tx execution). It will try + /// to check the write log first and if no entry found then the storage. + fn has_key_post( + &'view self, + key: &Key, + ) -> Result { + self.post().has_key(key) + } + + /// Storage prefix iterator for prior state (before tx execution). It will + /// try to read from the storage. + fn iter_pre_next( + &'view self, + iter: &mut Self::PrefixIter, + ) -> Result)>, storage_api::Error> { + self.pre().iter_next(iter) + } + + /// Storage prefix iterator next for posterior state (after tx execution). + /// It will try to read from the write log first and if no entry found + /// then from the storage. 
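For illustration only (crate paths and the `u64` value type are assumptions), a check written against the new `VpEnv` trait using the `read_pre`/`read_post` defaults above:

```rust
use namada::ledger::storage_api; // assumed path
use namada::ledger::vp_env::VpEnv;
use namada::types::storage::Key;

fn value_did_not_decrease<'view, ENV>(
    env: &'view ENV,
    key: &Key,
) -> Result<bool, storage_api::Error>
where
    ENV: VpEnv<'view>,
{
    // `read_pre` / `read_post` are the default methods built on `pre()` / `post()`.
    let pre: u64 = env.read_pre(key)?.unwrap_or_default();
    let post: u64 = env.read_post(key)?.unwrap_or_default();
    Ok(post >= pre)
}
```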
+ fn iter_post_next( + &'view self, + iter: &mut Self::PrefixIter, + ) -> Result)>, storage_api::Error> { + self.post().iter_next(iter) + } +} + /// These runtime errors will abort VP execution immediately #[allow(missing_docs)] #[derive(Error, Debug)] @@ -38,10 +202,10 @@ pub enum RuntimeError { } /// VP environment function result -pub type Result = std::result::Result; +pub type EnvResult = std::result::Result; /// Add a gas cost incured in a validity predicate -pub fn add_gas(gas_meter: &mut VpGasMeter, used_gas: u64) -> Result<()> { +pub fn add_gas(gas_meter: &mut VpGasMeter, used_gas: u64) -> EnvResult<()> { let result = gas_meter.add(used_gas).map_err(RuntimeError::OutOfGas); if let Err(err) = &result { tracing::info!("Stopping VP execution because of gas error: {}", err); @@ -56,7 +220,7 @@ pub fn read_pre( storage: &Storage, write_log: &WriteLog, key: &Key, -) -> Result>> +) -> EnvResult>> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -97,7 +261,7 @@ pub fn read_post( storage: &Storage, write_log: &WriteLog, key: &Key, -) -> Result>> +) -> EnvResult>> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -138,7 +302,7 @@ pub fn read_temp( gas_meter: &mut VpGasMeter, write_log: &WriteLog, key: &Key, -) -> Result>> { +) -> EnvResult>> { // Try to read from the write log first let (log_val, gas) = write_log.read(key); add_gas(gas_meter, gas)?; @@ -157,7 +321,7 @@ pub fn has_key_pre( gas_meter: &mut VpGasMeter, storage: &Storage, key: &Key, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -175,7 +339,7 @@ pub fn has_key_post( storage: &Storage, write_log: &WriteLog, key: &Key, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -205,7 +369,7 @@ where pub fn get_chain_id( gas_meter: &mut VpGasMeter, storage: &Storage, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -220,7 +384,7 @@ where pub fn get_block_height( gas_meter: &mut VpGasMeter, storage: &Storage, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -235,7 +399,7 @@ where pub fn get_block_hash( gas_meter: &mut VpGasMeter, storage: &Storage, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -247,7 +411,10 @@ where /// Getting the block hash. The height is that of the block to which the /// current transaction is being applied. -pub fn get_tx_code_hash(gas_meter: &mut VpGasMeter, tx: &Tx) -> Result { +pub fn get_tx_code_hash( + gas_meter: &mut VpGasMeter, + tx: &Tx, +) -> EnvResult { let hash = Hash(tx.code_hash()); add_gas(gas_meter, MIN_STORAGE_GAS)?; Ok(hash) @@ -258,7 +425,7 @@ pub fn get_tx_code_hash(gas_meter: &mut VpGasMeter, tx: &Tx) -> Result { pub fn get_block_epoch( gas_meter: &mut VpGasMeter, storage: &Storage, -) -> Result +) -> EnvResult where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -268,12 +435,13 @@ where Ok(epoch) } -/// Storage prefix iterator. It will try to get an iterator from the storage. +/// Storage prefix iterator, ordered by storage keys. It will try to get an +/// iterator from the storage. 
pub fn iter_prefix<'a, DB, H>( gas_meter: &mut VpGasMeter, storage: &'a Storage, prefix: &Key, -) -> Result<>::PrefixIter> +) -> EnvResult<>::PrefixIter> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -283,12 +451,28 @@ where Ok(iter) } +/// Storage prefix iterator, reverse ordered by storage keys. It will try to get +/// an iterator from the storage. +pub fn rev_iter_prefix<'a, DB, H>( + gas_meter: &mut VpGasMeter, + storage: &'a Storage, + prefix: &Key, +) -> EnvResult<>::PrefixIter> +where + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, +{ + let (iter, gas) = storage.rev_iter_prefix(prefix); + add_gas(gas_meter, gas)?; + Ok(iter) +} + /// Storage prefix iterator for prior state (before tx execution). It will try /// to read from the storage. pub fn iter_pre_next( gas_meter: &mut VpGasMeter, iter: &mut >::PrefixIter, -) -> Result)>> +) -> EnvResult)>> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, { @@ -306,7 +490,7 @@ pub fn iter_post_next( gas_meter: &mut VpGasMeter, write_log: &WriteLog, iter: &mut >::PrefixIter, -) -> Result)>> +) -> EnvResult)>> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, { diff --git a/shared/src/proto/mod.rs b/shared/src/proto/mod.rs index 7c092fd5e8..215e76ac45 100644 --- a/shared/src/proto/mod.rs +++ b/shared/src/proto/mod.rs @@ -3,13 +3,11 @@ pub mod generated; mod types; -pub use types::{ - Dkg, Error, Intent, IntentGossipMessage, IntentId, Signed, SignedSerialize, - SignedTxData, Tx, -}; +pub use types::{Dkg, Error, Signed, SignedSerialize, SignedTxData, Tx}; #[cfg(test)] mod tests { + use data_encoding::HEXLOWER; use generated::types::Tx; use prost::Message; @@ -24,8 +22,8 @@ mod tests { }; let mut tx_bytes = vec![]; tx.encode(&mut tx_bytes).unwrap(); - let tx_hex = hex::encode(tx_bytes); - let tx_from_hex = hex::decode(tx_hex).unwrap(); + let tx_hex = HEXLOWER.encode(&tx_bytes); + let tx_from_hex = HEXLOWER.decode(tx_hex.as_ref()).unwrap(); let tx_from_bytes = Tx::decode(&tx_from_hex[..]).unwrap(); assert_eq!(tx, tx_from_bytes); } diff --git a/shared/src/proto/types.rs b/shared/src/proto/types.rs index 1e9a3ca861..5097d6a0be 100644 --- a/shared/src/proto/types.rs +++ b/shared/src/proto/types.rs @@ -1,7 +1,5 @@ -use std::collections::hash_map::DefaultHasher; use std::collections::HashMap; use std::convert::{TryFrom, TryInto}; -use std::fmt::Display; use std::hash::{Hash, Hasher}; use std::marker::PhantomData; @@ -20,12 +18,8 @@ use crate::types::transaction::hash_tx; pub enum Error { #[error("Error decoding a transaction from bytes: {0}")] TxDecodingError(prost::DecodeError), - #[error("Error decoding an IntentGossipMessage from bytes: {0}")] - IntentDecodingError(prost::DecodeError), #[error("Error decoding an DkgGossipMessage from bytes: {0}")] DkgDecodingError(prost::DecodeError), - #[error("Intent is empty")] - NoIntentError, #[error("Dkg is empty")] NoDkgError, #[error("Timestamp is empty")] @@ -42,7 +36,7 @@ pub type Result = std::result::Result; /// /// Because the signature is not checked by the ledger, we don't inline it into /// the `Tx` type directly. Instead, the signature is attached to the `tx.data`, -/// which is can then be checked by a validity predicate wasm. +/// which can then be checked by a validity predicate wasm. 
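A minimal sketch of the `data_encoding` calls that replace the old `hex` helpers in the test above (the function is hypothetical):

```rust
use data_encoding::HEXLOWER;

fn hex_roundtrip(bytes: &[u8]) -> Vec<u8> {
    // Lowercase hex `String`, equivalent to the old `hex::encode`.
    let encoded = HEXLOWER.encode(bytes);
    HEXLOWER
        .decode(encoded.as_bytes())
        .expect("decoding just-encoded hex cannot fail")
}
```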
#[derive(Clone, Debug, BorshSerialize, BorshDeserialize, BorshSchema)] pub struct SignedTxData { /// The original tx data bytes, if any @@ -265,53 +259,6 @@ impl Tx { } } -#[derive(Clone, Debug, PartialEq)] -pub struct IntentGossipMessage { - pub intent: Intent, -} - -impl TryFrom<&[u8]> for IntentGossipMessage { - type Error = Error; - - fn try_from(intent_bytes: &[u8]) -> Result { - let intent = types::IntentGossipMessage::decode(intent_bytes) - .map_err(Error::IntentDecodingError)?; - match &intent.msg { - Some(types::intent_gossip_message::Msg::Intent(intent)) => { - Ok(IntentGossipMessage { - intent: intent.clone().try_into()?, - }) - } - None => Err(Error::NoIntentError), - } - } -} - -impl From for types::IntentGossipMessage { - fn from(message: IntentGossipMessage) -> Self { - types::IntentGossipMessage { - msg: Some(types::intent_gossip_message::Msg::Intent( - message.intent.into(), - )), - } - } -} - -impl IntentGossipMessage { - pub fn new(intent: Intent) -> Self { - IntentGossipMessage { intent } - } - - pub fn to_bytes(&self) -> Vec { - let mut bytes = vec![]; - let message: types::IntentGossipMessage = self.clone().into(); - message - .encode(&mut bytes) - .expect("encoding an intent gossip message failed"); - bytes - } -} - #[allow(dead_code)] #[derive(Clone, Debug, PartialEq)] pub struct DkgGossipMessage { @@ -361,67 +308,6 @@ impl DkgGossipMessage { } } -#[derive(Clone, Debug, PartialEq, Hash, Eq)] -pub struct Intent { - pub data: Vec, - pub timestamp: DateTimeUtc, -} - -impl TryFrom for Intent { - type Error = Error; - - fn try_from(intent: types::Intent) -> Result { - let timestamp = match intent.timestamp { - Some(t) => t.try_into().map_err(Error::InvalidTimestamp)?, - None => return Err(Error::NoTimestampError), - }; - Ok(Intent { - data: intent.data, - timestamp, - }) - } -} - -impl From for types::Intent { - fn from(intent: Intent) -> Self { - let timestamp = Some(intent.timestamp.into()); - types::Intent { - data: intent.data, - timestamp, - } - } -} - -impl Intent { - pub fn new(data: Vec) -> Self { - Intent { - data, - timestamp: DateTimeUtc::now(), - } - } - - pub fn id(&self) -> IntentId { - let mut hasher = DefaultHasher::new(); - self.hash(&mut hasher); - IntentId::from(hasher.finish().to_string()) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct IntentId(pub Vec); - -impl>> From for IntentId { - fn from(value: T) -> Self { - Self(value.into()) - } -} - -impl Display for IntentId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", hex::encode(&self.0)) - } -} - #[allow(dead_code)] #[derive(Clone, Debug, PartialEq)] pub struct Dkg { @@ -475,18 +361,6 @@ mod tests { } } - #[test] - fn test_intent_gossip_message() { - let data = "arbitrary data".as_bytes().to_owned(); - let intent = Intent::new(data); - let message = IntentGossipMessage::new(intent); - - let bytes = message.to_bytes(); - let message_from_bytes = IntentGossipMessage::try_from(bytes.as_ref()) - .expect("decoding failed"); - assert_eq!(message_from_bytes, message); - } - #[test] fn test_dkg_gossip_message() { let data = "arbitrary string".to_owned(); @@ -499,26 +373,6 @@ mod tests { assert_eq!(message_from_bytes, message); } - #[test] - fn test_intent() { - let data = "arbitrary data".as_bytes().to_owned(); - let intent = Intent::new(data.clone()); - - let types_intent: types::Intent = intent.clone().into(); - let intent_from_types = - Intent::try_from(types_intent).expect("no timestamp"); - assert_eq!(intent_from_types, intent); - - let 
types_intent = types::Intent { - data, - timestamp: None, - }; - match Intent::try_from(types_intent) { - Err(Error::NoTimestampError) => {} - _ => panic!("unexpected result"), - } - } - #[test] fn test_dkg() { let data = "arbitrary string".to_owned(); diff --git a/shared/src/types/address.rs b/shared/src/types/address.rs index 9954b4e568..74ab4f53fe 100644 --- a/shared/src/types/address.rs +++ b/shared/src/types/address.rs @@ -59,8 +59,8 @@ mod internal { "ano::Protocol Parameters "; pub const GOVERNANCE: &str = "ano::Governance "; - pub const TREASURY: &str = - "ano::Treasury "; + pub const SLASH_FUND: &str = + "ano::Slash Fund "; pub const IBC_BURN: &str = "ano::IBC Burn Address "; pub const IBC_MINT: &str = @@ -187,7 +187,9 @@ impl Address { InternalAddress::Governance => { internal::GOVERNANCE.to_string() } - InternalAddress::Treasury => internal::TREASURY.to_string(), + InternalAddress::SlashFund => { + internal::SLASH_FUND.to_string() + } InternalAddress::IbcEscrow(hash) => { format!("{}::{}", PREFIX_INTERNAL, hash) } @@ -250,8 +252,8 @@ impl Address { internal::GOVERNANCE => { Ok(Address::Internal(InternalAddress::Governance)) } - internal::TREASURY => { - Ok(Address::Internal(InternalAddress::Treasury)) + internal::SLASH_FUND => { + Ok(Address::Internal(InternalAddress::SlashFund)) } internal::IBC_MINT => { Ok(Address::Internal(InternalAddress::IbcMint)) @@ -455,8 +457,8 @@ pub enum InternalAddress { IbcMint, /// Governance address Governance, - /// Treasury address - Treasury, + /// SlashFund address for governance + SlashFund, /// Bridge to Ethereum EthBridge, /// The pool of transactions to be relayed to Ethereum @@ -485,7 +487,7 @@ impl Display for InternalAddress { Self::Ibc => "IBC".to_string(), Self::Parameters => "Parameters".to_string(), Self::Governance => "Governance".to_string(), - Self::Treasury => "Treasury".to_string(), + Self::SlashFund => "SlashFund".to_string(), Self::IbcEscrow(hash) => format!("IbcEscrow: {}", hash), Self::IbcBurn => "IbcBurn".to_string(), Self::IbcMint => "IbcMint".to_string(), @@ -723,7 +725,7 @@ pub mod testing { InternalAddress::PosSlashPool => {} InternalAddress::Ibc => {} InternalAddress::Governance => {} - InternalAddress::Treasury => {} + InternalAddress::SlashFund => {} InternalAddress::Parameters => {} InternalAddress::IbcEscrow(_) => {} InternalAddress::IbcBurn => {} @@ -742,7 +744,7 @@ pub mod testing { Just(InternalAddress::IbcBurn), Just(InternalAddress::IbcMint), Just(InternalAddress::Governance), - Just(InternalAddress::Treasury), + Just(InternalAddress::SlashFund), Just(InternalAddress::EthBridge), Just(InternalAddress::EthBridgePool), ] diff --git a/shared/src/types/governance.rs b/shared/src/types/governance.rs index c8bf57469f..5f82335cb2 100644 --- a/shared/src/types/governance.rs +++ b/shared/src/types/governance.rs @@ -5,6 +5,7 @@ use std::fmt::{self, Display}; use std::str::FromStr; use borsh::{BorshDeserialize, BorshSerialize}; +use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -13,8 +14,12 @@ use super::hash::Hash; use super::key::common::{self, Signature}; use super::key::SigScheme; use super::storage::Epoch; +use super::token::SCALE; use super::transaction::governance::InitProposalData; +/// Type alias for vote power +pub type VotePower = u128; + #[derive( Debug, Clone, @@ -83,7 +88,38 @@ pub enum TallyResult { Unknown, } -impl fmt::Display for TallyResult { +/// The result with votes of a proposal +pub struct ProposalResult { + /// The result of a proposal + pub result: 
TallyResult, + /// The total voting power during the proposal tally + pub total_voting_power: VotePower, + /// The total voting power from yay votes + pub total_yay_power: VotePower, + /// The total voting power from nay votes (unused at the moment) + pub total_nay_power: VotePower, +} + +impl Display for ProposalResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let percentage = Decimal::checked_div( + self.total_yay_power.into(), + self.total_voting_power.into(), + ) + .unwrap_or_default(); + + write!( + f, + "{} with {} yay votes over {} ({:.2}%)", + self.result, + self.total_yay_power / SCALE as u128, + self.total_voting_power / SCALE as u128, + percentage.checked_mul(100.into()).unwrap_or_default() + ) + } +} + +impl Display for TallyResult { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TallyResult::Passed => write!(f, "passed"), diff --git a/shared/src/types/hash.rs b/shared/src/types/hash.rs index ee06451635..4d3d01d4c8 100644 --- a/shared/src/types/hash.rs +++ b/shared/src/types/hash.rs @@ -3,6 +3,7 @@ use std::fmt::{self, Display}; use std::ops::Deref; +use arse_merkle_tree::traits::Value; use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; use hex::FromHex; use serde::{Deserialize, Serialize}; @@ -121,6 +122,11 @@ impl Hash { let digest = Sha256::digest(data.as_ref()); Self(*digest.as_ref()) } + + /// Check if the hash is all zeros + pub fn is_zero(&self) -> bool { + self == &Self::zero() + } } impl From for TmHash { diff --git a/shared/src/types/intent.rs b/shared/src/types/intent.rs deleted file mode 100644 index 3acb43f0c0..0000000000 --- a/shared/src/types/intent.rs +++ /dev/null @@ -1,479 +0,0 @@ -//! Intent data definitions and transaction and validity-predicate helpers. - -use std::collections::{HashMap, HashSet}; -use std::convert::TryFrom; -use std::io::ErrorKind; - -use borsh::{BorshDeserialize, BorshSerialize}; -use derivative::Derivative; -use rust_decimal::prelude::*; -use serde::{Deserialize, Serialize}; -use thiserror::Error; - -use crate::proto::Signed; -use crate::types::address::Address; -use crate::types::storage::{DbKeySeg, Key, KeySeg}; -use crate::types::token; - -/// A simple intent for fungible token trade -#[derive( - Debug, - Clone, - PartialEq, - BorshSerialize, - BorshDeserialize, - Serialize, - Deserialize, - Eq, -)] -pub struct FungibleTokenIntent { - /// List of exchange definitions - pub exchange: HashSet>, -} - -#[derive( - Debug, - Clone, - BorshSerialize, - BorshDeserialize, - Serialize, - Deserialize, - Eq, - PartialEq, - Hash, - PartialOrd, - Derivative, -)] -/// The definition of an intent exchange -pub struct Exchange { - /// The source address - pub addr: Address, - /// The token to be sold - pub token_sell: Address, - /// The minimum rate - pub rate_min: DecimalWrapper, - /// The maximum amount of token to be sold - pub max_sell: token::Amount, - /// The token to be bought - pub token_buy: Address, - /// The amount of token to be bought - pub min_buy: token::Amount, - /// The vp code - #[derivative(Debug = "ignore")] - pub vp: Option>, -} - -/// These are transfers crafted from matched [`Exchange`]s created by a -/// matchmaker program. 
-#[derive( - Debug, - Clone, - BorshSerialize, - BorshDeserialize, - Serialize, - Deserialize, - PartialEq, -)] -pub struct MatchedExchanges { - /// Transfers crafted from the matched intents - pub transfers: HashSet, - // TODO benchmark between an map or a set, see which is less costly - /// The exchanges that were matched - pub exchanges: HashMap>, - /// The intents - // TODO: refactor this without duplicating stuff. The exchanges in the - // `exchanges` hashmap are already contained in the FungibleTokenIntents - // belows - pub intents: HashMap>, -} - -/// These are transfers crafted from matched [`Exchange`]s with a source address -/// that is expected to sign this data. -#[derive( - Debug, - Clone, - BorshSerialize, - BorshDeserialize, - Serialize, - Deserialize, - PartialEq, -)] -pub struct IntentTransfers { - /// Matched exchanges - pub matches: MatchedExchanges, - /// Source address that should sign this data - pub source: Address, -} - -/// Struct holding a safe rapresentation of a float -#[derive( - Debug, - Clone, - Eq, - PartialEq, - Hash, - PartialOrd, - Serialize, - Deserialize, - Default, -)] -pub struct DecimalWrapper(pub Decimal); - -impl From for DecimalWrapper { - fn from(decimal: Decimal) -> Self { - DecimalWrapper(decimal) - } -} - -#[allow(missing_docs)] -#[derive(Error, Debug)] -pub enum Error { - #[error("Error parsing as decimal: {0}.")] - DecimalParseError(String), -} - -impl TryFrom for DecimalWrapper { - type Error = Error; - - fn try_from(amount: token::Amount) -> Result { - let decimal = Decimal::from_i128(amount.change()); - - match decimal { - Some(d) => Ok(DecimalWrapper::from(d)), - None => Err(Error::DecimalParseError(amount.change().to_string())), - } - } -} - -impl FromStr for DecimalWrapper { - type Err = Error; - - fn from_str(s: &str) -> Result { - let decimal = Decimal::from_str(s) - .map_err(|e| Self::Err::DecimalParseError(e.to_string())); - - match decimal { - Ok(d) => Ok(DecimalWrapper::from(d)), - Err(e) => Err(e), - } - } -} - -impl BorshSerialize for DecimalWrapper { - fn serialize( - &self, - writer: &mut W, - ) -> std::io::Result<()> { - let vec = self.0.to_string().as_bytes().to_vec(); - let bytes = vec - .try_to_vec() - .expect("DecimalWrapper bytes encoding shouldn't fail"); - writer.write_all(&bytes) - } -} - -impl BorshDeserialize for DecimalWrapper { - fn deserialize(buf: &mut &[u8]) -> std::io::Result { - // deserialize the bytes first - let bytes: Vec = - BorshDeserialize::deserialize(buf).map_err(|e| { - std::io::Error::new( - ErrorKind::InvalidInput, - format!("Error decoding DecimalWrapper: {}", e), - ) - })?; - let decimal_str: &str = - std::str::from_utf8(bytes.as_slice()).map_err(|e| { - std::io::Error::new( - ErrorKind::InvalidInput, - format!("Error decoding decimal: {}", e), - ) - })?; - let decimal = Decimal::from_str(decimal_str).map_err(|e| { - std::io::Error::new( - ErrorKind::InvalidInput, - format!("Error decoding decimal: {}", e), - ) - })?; - Ok(DecimalWrapper(decimal)) - } -} - -impl MatchedExchanges { - /// Create an empty [`MatchedExchanges`]. - pub fn empty() -> Self { - Self { - transfers: HashSet::new(), - exchanges: HashMap::new(), - intents: HashMap::new(), - } - } -} - -const INVALID_INTENT_STORAGE_KEY: &str = "invalid_intent"; - -/// Obtain a storage key for user's invalid intent set. 
-pub fn invalid_intent_key(owner: &Address) -> Key { - Key::from(owner.to_db_key()) - .push(&INVALID_INTENT_STORAGE_KEY.to_owned()) - .expect("Cannot obtain a storage key") -} - -/// Check if the given storage key is a key for a set of intent sig. If it is, -/// returns the owner. -pub fn is_invalid_intent_key(key: &Key) -> Option<&Address> { - match &key.segments[..] { - [DbKeySeg::AddressSeg(owner), DbKeySeg::StringSeg(key)] - if key == INVALID_INTENT_STORAGE_KEY => - { - Some(owner) - } - _ => None, - } -} - -#[cfg(test)] -mod tests { - use std::env; - use std::iter::FromIterator; - - use constants::*; - - use super::*; - use crate::ledger::storage::types::{decode, encode}; - use crate::types::key; - - #[test] - fn test_encode_decode_intent_transfer_without_vp() { - let bertha_addr = Address::from_str(BERTHA).unwrap(); - let albert_addr = Address::from_str(ALBERT).unwrap(); - - let bertha_keypair = key::testing::keypair_1(); - let albert_keypair = key::testing::keypair_2(); - - let exchange_one = Exchange { - addr: Address::from_str(BERTHA).unwrap(), - token_buy: Address::from_str(XAN).unwrap(), - token_sell: Address::from_str(BTC).unwrap(), - max_sell: token::Amount::from(100), - min_buy: token::Amount::from(1), - rate_min: DecimalWrapper::from_str("0.1").unwrap(), - vp: None, - }; - let exchange_two = Exchange { - addr: Address::from_str(ALBERT).unwrap(), - token_buy: Address::from_str(BTC).unwrap(), - token_sell: Address::from_str(XAN).unwrap(), - max_sell: token::Amount::from(1), - min_buy: token::Amount::from(100), - rate_min: DecimalWrapper::from_str("10").unwrap(), - vp: None, - }; - - let signed_exchange_one = Signed::new(&bertha_keypair, exchange_one); - let signed_exchange_two = Signed::new(&bertha_keypair, exchange_two); - - let mut it = MatchedExchanges::empty(); - it.exchanges = HashMap::<_, _>::from_iter( - vec![ - (bertha_addr.clone(), signed_exchange_one.clone()), - (albert_addr.clone(), signed_exchange_two.clone()), - ] - .into_iter(), - ); - - it.intents = HashMap::<_, _>::from_iter( - vec![ - ( - bertha_addr.clone(), - Signed::new( - &bertha_keypair, - FungibleTokenIntent { - exchange: HashSet::from_iter(vec![ - signed_exchange_one, - ]), - }, - ), - ), - ( - albert_addr.clone(), - Signed::new( - &albert_keypair, - FungibleTokenIntent { - exchange: HashSet::from_iter(vec![ - signed_exchange_two, - ]), - }, - ), - ), - ] - .into_iter(), - ); - - it.transfers = HashSet::<_>::from_iter( - vec![ - token::Transfer { - source: bertha_addr.clone(), - target: albert_addr.clone(), - token: Address::from_str(BTC).unwrap(), - sub_prefix: None, - amount: token::Amount::from(100), - }, - token::Transfer { - source: albert_addr, - target: bertha_addr, - token: Address::from_str(XAN).unwrap(), - sub_prefix: None, - amount: token::Amount::from(1), - }, - ] - .into_iter(), - ); - - let encoded_intent_transfer = encode(&it); - let decoded_intent_transfer: MatchedExchanges = - decode(encoded_intent_transfer).unwrap(); - - assert!(decoded_intent_transfer == it); - } - - #[test] - fn test_encode_decode_intent_transfer_with_vp() { - let bertha_addr = Address::from_str(BERTHA).unwrap(); - let albert_addr = Address::from_str(ALBERT).unwrap(); - - let bertha_keypair = key::testing::keypair_1(); - let albert_keypair = key::testing::keypair_2(); - - let working_dir = env::current_dir().unwrap(); - - let exchange_one = Exchange { - addr: Address::from_str(BERTHA).unwrap(), - token_buy: Address::from_str(XAN).unwrap(), - token_sell: Address::from_str(BTC).unwrap(), - max_sell: 
token::Amount::from(100), - min_buy: token::Amount::from(1), - rate_min: DecimalWrapper::from_str("0.1").unwrap(), - vp: Some( - std::fs::read(format!( - "{}/../{}", - working_dir.to_string_lossy(), - VP_ALWAYS_FALSE_WASM - )) - .unwrap(), - ), - }; - let exchange_two = Exchange { - addr: Address::from_str(ALBERT).unwrap(), - token_buy: Address::from_str(BTC).unwrap(), - token_sell: Address::from_str(XAN).unwrap(), - max_sell: token::Amount::from(1), - min_buy: token::Amount::from(100), - rate_min: DecimalWrapper::from_str("10").unwrap(), - vp: Some( - std::fs::read(format!( - "{}/../{}", - working_dir.to_string_lossy(), - VP_ALWAYS_TRUE_WASM - )) - .unwrap(), - ), - }; - - let signed_exchange_one = Signed::new(&bertha_keypair, exchange_one); - let signed_exchange_two = Signed::new(&bertha_keypair, exchange_two); - - let mut it = MatchedExchanges::empty(); - it.exchanges = HashMap::<_, _>::from_iter( - vec![ - (bertha_addr.clone(), signed_exchange_one.clone()), - (albert_addr.clone(), signed_exchange_two.clone()), - ] - .into_iter(), - ); - - it.intents = HashMap::<_, _>::from_iter( - vec![ - ( - bertha_addr.clone(), - Signed::new( - &bertha_keypair, - FungibleTokenIntent { - exchange: HashSet::from_iter(vec![ - signed_exchange_one, - ]), - }, - ), - ), - ( - albert_addr.clone(), - Signed::new( - &albert_keypair, - FungibleTokenIntent { - exchange: HashSet::from_iter(vec![ - signed_exchange_two, - ]), - }, - ), - ), - ] - .into_iter(), - ); - - it.transfers = HashSet::<_>::from_iter( - vec![ - token::Transfer { - source: bertha_addr.clone(), - target: albert_addr.clone(), - token: Address::from_str(BTC).unwrap(), - sub_prefix: None, - amount: token::Amount::from(100), - }, - token::Transfer { - source: albert_addr, - target: bertha_addr, - token: Address::from_str(XAN).unwrap(), - sub_prefix: None, - amount: token::Amount::from(1), - }, - ] - .into_iter(), - ); - - let encoded_intent_transfer = encode(&it); - let decoded_intent_transfer: MatchedExchanges = - decode(encoded_intent_transfer).unwrap(); - - assert!(decoded_intent_transfer == it); - } - - #[cfg(test)] - #[allow(dead_code)] - mod constants { - - // User addresses - pub const ALBERT: &str = "atest1v4ehgw368ycryv2z8qcnxv3cxgmrgvjpxs6yg333gym5vv2zxepnj334g4rryvj9xucrgve4x3xvr4"; - pub const BERTHA: &str = "atest1v4ehgw36xvcyyvejgvenxs34g3zygv3jxqunjd6rxyeyys3sxy6rwvfkx4qnj33hg9qnvse4lsfctw"; - pub const CHRISTEL: &str = "atest1v4ehgw36x3qng3jzggu5yvpsxgcngv2xgguy2dpkgvu5x33kx3pr2w2zgep5xwfkxscrxs2pj8075p"; - - // Fungible token addresses - pub const XAN: &str = "atest1v4ehgw36x3prswzxggunzv6pxqmnvdj9xvcyzvpsggeyvs3cg9qnywf589qnwvfsg5erg3fkl09rg5"; - pub const BTC: &str = "atest1v4ehgw36xdzryve5gsc52veeg5cnsv2yx5eygvp38qcrvd29xy6rys6p8yc5xvp4xfpy2v694wgwcp"; - pub const ETH: &str = "atest1v4ehgw36xqmr2d3nx3ryvd2xxgmrq33j8qcns33sxezrgv6zxdzrydjrxveygd2yxumrsdpsf9jc2p"; - pub const DOT: &str = "atest1v4ehgw36gg6nvs2zgfpyxsfjgc65yv6pxy6nwwfsxgungdzrggeyzv35gveyxsjyxymyz335hur2jn"; - - // Bite-sized tokens - pub const SCHNITZEL: &str = "atest1v4ehgw36xue5xvf5xvuyzvpjx5un2v3k8qeyvd3cxdqns32p89rrxd6xx9zngvpegccnzs699rdnnt"; - pub const APFEL: &str = "atest1v4ehgw36gfryydj9g3p5zv3kg9znyd358ycnzsfcggc5gvecgc6ygs2rxv6ry3zpg4zrwdfeumqcz9"; - pub const KARTOFFEL: &str = "atest1v4ehgw36gep5ysecxq6nyv3jg3zygv3e89qn2vp48pryxsf4xpznvve5gvmy23fs89pryvf5a6ht90"; - - // Paths to the WASMs used for tests - pub const TX_TRANSFER_WASM: &str = "wasm/tx_transfer.wasm"; - pub const VP_USER_WASM: &str = "wasm/vp_user.wasm"; - pub const TX_NO_OP_WASM: &str = 
"wasm_for_tests/tx_no_op.wasm"; - pub const VP_ALWAYS_TRUE_WASM: &str = - "wasm_for_tests/vp_always_true.wasm"; - pub const VP_ALWAYS_FALSE_WASM: &str = - "wasm_for_tests/vp_always_false.wasm"; - } -} diff --git a/shared/src/types/key/common.rs b/shared/src/types/key/common.rs index 633367053c..8144acf466 100644 --- a/shared/src/types/key/common.rs +++ b/shared/src/types/key/common.rs @@ -6,15 +6,16 @@ use std::str::FromStr; use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; use data_encoding::HEXLOWER; +use namada_proof_of_stake::types::PublicKeyTmRawHash; #[cfg(feature = "rand")] use rand::{CryptoRng, RngCore}; use serde::{Deserialize, Serialize}; use thiserror::Error; use super::{ - ed25519, secp256k1, ParsePublicKeyError, ParseSecretKeyError, - ParseSignatureError, RefTo, SchemeType, SigScheme as SigSchemeTrait, - VerifySigError, + ed25519, secp256k1, tm_consensus_key_raw_hash, ParsePublicKeyError, + ParseSecretKeyError, ParseSignatureError, RefTo, SchemeType, + SigScheme as SigSchemeTrait, VerifySigError, }; use crate::types::ethereum_events::EthAddress; @@ -344,3 +345,9 @@ impl super::SigScheme for SigScheme { } } } + +impl PublicKeyTmRawHash for PublicKey { + fn tm_raw_hash(&self) -> String { + tm_consensus_key_raw_hash(self) + } +} diff --git a/shared/src/types/key/mod.rs b/shared/src/types/key/mod.rs index f2908f155b..7afb811810 100644 --- a/shared/src/types/key/mod.rs +++ b/shared/src/types/key/mod.rs @@ -450,6 +450,12 @@ pub mod testing { }) } + /// Generate an arbitrary [`common::SecretKey`]. + pub fn arb_common_keypair() -> impl Strategy { + arb_keypair::() + .prop_map(|keypair| keypair.try_to_sk().unwrap()) + } + /// Generate a new random [`super::SecretKey`]. pub fn gen_keypair() -> S::SecretKey { let mut rng: ThreadRng = thread_rng(); diff --git a/shared/src/types/matchmaker.rs b/shared/src/types/matchmaker.rs deleted file mode 100644 index 5ee67a83ed..0000000000 --- a/shared/src/types/matchmaker.rs +++ /dev/null @@ -1,30 +0,0 @@ -//! Matchmaker types - -use std::collections::HashSet; - -/// A matchmaker marker trait. This should not be implemented manually. Instead, -/// it is added by the derive `Matchmaker` macro, which also adds necessary -/// binding code for matchmaker dylib runner. 
-pub trait Matchmaker: AddIntent {} - -/// A matchmaker must implement this trait -pub trait AddIntent: Default { - // TODO: For some reason, using `&[u8]` causes the `decode_intent_data` to - // fail decoding - /// Add a new intent to matchmaker's state - #[allow(clippy::ptr_arg)] - fn add_intent( - &mut self, - intent_id: &Vec, - intent_data: &Vec, - ) -> AddIntentResult; -} - -/// The result of calling matchmaker's `add_intent` function -#[derive(Clone, Debug, Default)] -pub struct AddIntentResult { - /// A transaction matched from the intent, if any - pub tx: Option>, - /// The intent IDs that were matched into the tx, if any - pub matched_intents: Option>>, -} diff --git a/shared/src/types/mod.rs b/shared/src/types/mod.rs index 72db38b769..5690f00ec9 100644 --- a/shared/src/types/mod.rs +++ b/shared/src/types/mod.rs @@ -8,11 +8,9 @@ pub mod ethereum_events; pub mod governance; pub mod hash; pub mod ibc; -pub mod intent; pub mod internal; pub mod keccak; pub mod key; -pub mod matchmaker; pub mod nft; pub mod storage; pub mod time; diff --git a/shared/src/types/storage.rs b/shared/src/types/storage.rs index eee9b5b847..3d1c3f772d 100644 --- a/shared/src/types/storage.rs +++ b/shared/src/types/storage.rs @@ -8,6 +8,7 @@ use std::str::FromStr; use arse_merkle_tree::InternalKey; use borsh::{BorshDeserialize, BorshSchema, BorshSerialize}; +use data_encoding::BASE32HEX_NOPAD; use ics23::CommitmentProof; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -34,6 +35,8 @@ pub enum Error { ParseAddressFromKey, #[error("Reserved prefix or string is specified: {0}")] InvalidKeySeg(String), + #[error("Error parsing key segment {0}")] + ParseKeySeg(String), #[error("Could not parse string into a key segment: {0}")] ParseError(String), } @@ -209,6 +212,7 @@ impl Header { BorshDeserialize, BorshSchema, Debug, + Default, Eq, PartialEq, Ord, @@ -222,6 +226,13 @@ pub struct Key { pub segments: Vec, } +/// A [`Key`] made of borrowed key segments [`DbKeySeg`]. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct KeyRef<'a> { + /// Reference of key segments + pub segments: &'a [DbKeySeg], +} + impl From for Key { fn from(seg: DbKeySeg) -> Self { Self { @@ -418,6 +429,23 @@ impl Key { self.len() == 0 } + /// Returns the first segment of the key, or `None` if it is empty. + pub fn first(&self) -> Option<&DbKeySeg> { + self.segments.first() + } + + /// Returns the last segment of the key, or `None` if it is empty. + pub fn last(&self) -> Option<&DbKeySeg> { + self.segments.last() + } + + /// Returns the prefix before the last segment and last segment of the key, + /// or `None` if it is empty. + pub fn split_last(&self) -> Option<(KeyRef<'_>, &DbKeySeg)> { + let (last, prefix) = self.segments.split_last()?; + Some((KeyRef { segments: prefix }, last)) + } + /// Returns a key of the validity predicate of the given address /// Only this function can push "?" 
segment for validity predicate pub fn validity_predicate(addr: &Address) -> Self { @@ -461,8 +489,11 @@ impl Key { .split_off(2) .join(&KEY_SEGMENT_SEPARATOR.to_string()), ) - .map_err(|e| Error::Temporary { - error: format!("Cannot parse key segments {}: {}", db_key, e), + .map_err(|e| { + Error::ParseKeySeg(format!( + "Cannot parse key segments {}: {}", + db_key, e + )) })?, }; Ok(key) @@ -490,6 +521,28 @@ impl Key { }), } } + + /// Check if the key begins with the given prefix and returns: + /// - `Some(Some(suffix))` the suffix after the match with, if any, or + /// - `Some(None)` if the prefix is matched, but it has no suffix, or + /// - `None` if it doesn't match + pub fn split_prefix(&self, prefix: &Self) -> Option> { + if self.segments.len() < prefix.segments.len() { + return None; + } else if self == prefix { + return Some(None); + } + // This is safe, because we check that the length of segments in self >= + // in prefix above + let (self_prefix, rest) = self.segments.split_at(prefix.segments.len()); + if self_prefix == prefix.segments { + Some(Some(Key { + segments: rest.to_vec(), + })) + } else { + None + } + } } impl Display for Key { @@ -504,6 +557,20 @@ impl Display for Key { } } +impl KeyRef<'_> { + /// Check if [`KeyRef`] is equal to a [`Key`]. + pub fn eq_owned(&self, other: &Key) -> bool { + self.segments == other.segments + } + + /// Returns the prefix before the last segment and last segment of the key, + /// or `None` if it is empty. + pub fn split_last(&self) -> Option<(KeyRef<'_>, &DbKeySeg)> { + let (last, prefix) = self.segments.split_last()?; + Some((KeyRef { segments: prefix }, last)) + } +} + // TODO use std::convert::{TryFrom, Into}? /// Represents a segment in a path that may be used as a database key pub trait KeySeg { @@ -544,7 +611,7 @@ pub enum DbKeySeg { impl KeySeg for DbKeySeg { fn parse(mut string: String) -> Result { - // a separator should not included + // a separator should not be included if string.contains(KEY_SEGMENT_SEPARATOR) { return Err(Error::InvalidKeySeg(string)); } @@ -590,14 +657,17 @@ impl KeySeg for String { impl KeySeg for BlockHeight { fn parse(string: String) -> Result { - let h = string.parse::().map_err(|e| Error::Temporary { - error: format!("Unexpected height value {}, {}", string, e), + let h = string.parse::().map_err(|e| { + Error::ParseKeySeg(format!( + "Unexpected height value {}, {}", + string, e + )) })?; Ok(BlockHeight(h)) } fn raw(&self) -> String { - format!("{}", self.0) + self.0.raw() } fn to_db_key(&self) -> DbKeySeg { @@ -656,6 +726,67 @@ impl KeySeg for KeccakHash { } } +/// Implement [`KeySeg`] for a type via base32hex of its BE bytes (using +/// `to_le_bytes()` and `from_le_bytes` methods) that maintains sort order of +/// the original data. +// TODO this could be a bit more efficient without the string conversion (atm +// with base32hex), if we can use bytes for storage key directly (which we can +// with rockDB, but atm, we're calling `to_string()` using the custom `Display` +// impl from here) +macro_rules! 
impl_int_key_seg { + ($unsigned:ty, $signed:ty, $len:literal) => { + impl KeySeg for $unsigned { + fn parse(string: String) -> Result { + let bytes = + BASE32HEX_NOPAD.decode(string.as_ref()).map_err(|err| { + Error::ParseKeySeg(format!( + "Failed parsing {} with {}", + string, err + )) + })?; + let mut fixed_bytes = [0; $len]; + fixed_bytes.copy_from_slice(&bytes); + Ok(<$unsigned>::from_be_bytes(fixed_bytes)) + } + + fn raw(&self) -> String { + BASE32HEX_NOPAD.encode(&self.to_be_bytes()) + } + + fn to_db_key(&self) -> DbKeySeg { + DbKeySeg::StringSeg(self.raw()) + } + } + + impl KeySeg for $signed { + fn parse(string: String) -> Result { + // get signed int from a unsigned int complemented with a min + // value + let complemented = <$unsigned>::parse(string)?; + let signed = (complemented as $signed) ^ <$signed>::MIN; + Ok(signed) + } + + fn raw(&self) -> String { + // signed int is converted to unsigned int that preserves the + // order by complementing it with a min value + let complemented = (*self ^ <$signed>::MIN) as $unsigned; + complemented.raw() + } + + fn to_db_key(&self) -> DbKeySeg { + DbKeySeg::StringSeg(self.raw()) + } + } + }; +} + +impl_int_key_seg!(u8, i8, 1); +impl_int_key_seg!(u16, i16, 2); +impl_int_key_seg!(u32, i32, 4); +impl_int_key_seg!(u64, i64, 8); +impl_int_key_seg!(u128, i128, 16); + /// Epoch identifier. Epochs are identified by consecutive numbers. #[derive( Clone, diff --git a/shared/src/types/token.rs b/shared/src/types/token.rs index a4a123da12..787a8855dc 100644 --- a/shared/src/types/token.rs +++ b/shared/src/types/token.rs @@ -139,6 +139,12 @@ impl From for u64 { } } +impl From for u128 { + fn from(amount: Amount) -> Self { + u128::from(amount.micro) + } +} + impl Add for Amount { type Output = Amount; @@ -458,3 +464,21 @@ mod tests { assert_eq!("0", zero.to_string()); } } + +/// Helpers for testing with addresses. 
+#[cfg(any(test, feature = "testing"))] +pub mod testing { + use proptest::prelude::*; + + use super::*; + + /// Generate an arbitrary token amount + pub fn arb_amount() -> impl Strategy { + any::().prop_map(Amount::from) + } + + /// Generate an arbitrary token amount up to and including given `max` value + pub fn arb_amount_ceiled(max: u64) -> impl Strategy { + (0..=max).prop_map(Amount::from) + } +} diff --git a/shared/src/types/vote_extensions/ethereum_events.rs b/shared/src/types/vote_extensions/ethereum_events.rs index ac0993a3b7..9e97692b53 100644 --- a/shared/src/types/vote_extensions/ethereum_events.rs +++ b/shared/src/types/vote_extensions/ethereum_events.rs @@ -233,7 +233,7 @@ mod tests { .collect(); #[cfg(feature = "abcipp")] let signers = { - let mut s = HashSet::new(); + let mut s = BTreeSet::new(); s.insert(validator_1.clone()); s.insert(validator_2); s diff --git a/shared/src/vm/host_env.rs b/shared/src/vm/host_env.rs index 88bd81b918..ea8d45e9b0 100644 --- a/shared/src/vm/host_env.rs +++ b/shared/src/vm/host_env.rs @@ -41,15 +41,10 @@ pub enum TxRuntimeError { OutOfGas(gas::Error), #[error("Trying to modify storage for an address that doesn't exit {0}")] UnknownAddressStorageModification(Address), - #[error("Trying to update a validity predicate with an invalid WASM {0}")] - UpdateVpInvalid(WasmValidationError), + #[error("Trying to use a validity predicate with an invalid WASM {0}")] + InvalidVpCode(WasmValidationError), #[error("A validity predicate of an account cannot be deleted")] CannotDeleteVp, - #[error( - "Trying to initialize an account with an invalid validity predicate \ - WASM {0}" - )] - InitAccountInvalidVpWasm(WasmValidationError), #[error("Storage modification error: {0}")] StorageModificationError(write_log::Error), #[error("Storage error: {0}")] @@ -69,7 +64,7 @@ pub enum TxRuntimeError { type TxResult = std::result::Result; /// A transaction's host environment -pub struct TxEnv<'a, MEM, DB, H, CA> +pub struct TxVmEnv<'a, MEM, DB, H, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -113,7 +108,7 @@ where pub cache_access: std::marker::PhantomData, } -impl<'a, MEM, DB, H, CA> TxEnv<'a, MEM, DB, H, CA> +impl<'a, MEM, DB, H, CA> TxVmEnv<'a, MEM, DB, H, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -168,7 +163,7 @@ where } } -impl Clone for TxEnv<'_, MEM, DB, H, CA> +impl Clone for TxVmEnv<'_, MEM, DB, H, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -208,7 +203,7 @@ where } /// A validity predicate's host environment -pub struct VpEnv<'a, MEM, DB, H, EVAL, CA> +pub struct VpVmEnv<'a, MEM, DB, H, EVAL, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -283,7 +278,7 @@ pub trait VpEvaluator { ) -> HostEnvResult; } -impl<'a, MEM, DB, H, EVAL, CA> VpEnv<'a, MEM, DB, H, EVAL, CA> +impl<'a, MEM, DB, H, EVAL, CA> VpVmEnv<'a, MEM, DB, H, EVAL, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -332,7 +327,7 @@ where } } -impl Clone for VpEnv<'_, MEM, DB, H, EVAL, CA> +impl Clone for VpVmEnv<'_, MEM, DB, H, EVAL, CA> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -436,7 +431,7 @@ where /// Called from tx wasm to request to use the given gas amount pub fn tx_charge_gas( - env: &TxEnv, + env: &TxVmEnv, used_gas: i32, ) -> TxResult<()> where @@ -455,7 +450,7 @@ where /// Add a gas cost incured in a transaction pub fn tx_add_gas( - env: &TxEnv, + env: &TxVmEnv, used_gas: u64, ) -> 
TxResult<()> where @@ -478,9 +473,9 @@ where /// Called from VP wasm to request to use the given gas amount pub fn vp_charge_gas( - env: &VpEnv, + env: &VpVmEnv, used_gas: i32, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -500,7 +495,7 @@ where /// Storage `has_key` function exposed to the wasm VM Tx environment. It will /// try to check the write log first and if no entry found then the storage. pub fn tx_has_key( - env: &TxEnv, + env: &TxVmEnv, key_ptr: u64, key_len: u64, ) -> TxResult @@ -556,7 +551,7 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn tx_read( - env: &TxEnv, + env: &TxVmEnv, key_ptr: u64, key_len: u64, ) -> TxResult @@ -646,7 +641,7 @@ where /// any) back to the guest, the second step reads the value from cache into a /// pre-allocated buffer with the obtained size. pub fn tx_result_buffer( - env: &TxEnv, + env: &TxVmEnv, result_ptr: u64, ) -> TxResult<()> where @@ -666,9 +661,9 @@ where /// Storage prefix iterator function exposed to the wasm VM Tx environment. /// It will try to get an iterator from the storage and return the corresponding -/// ID of the iterator. +/// ID of the iterator, ordered by storage keys. pub fn tx_iter_prefix( - env: &TxEnv, + env: &TxVmEnv, prefix_ptr: u64, prefix_len: u64, ) -> TxResult @@ -696,6 +691,38 @@ where Ok(iterators.insert(iter).id()) } +/// Storage prefix iterator function exposed to the wasm VM Tx environment. +/// It will try to get an iterator from the storage and return the corresponding +/// ID of the iterator, reverse ordered by storage keys. +pub fn tx_rev_iter_prefix( + env: &TxVmEnv, + prefix_ptr: u64, + prefix_len: u64, +) -> TxResult +where + MEM: VmMemory, + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, + CA: WasmCacheAccess, +{ + let (prefix, gas) = env + .memory + .read_string(prefix_ptr, prefix_len as _) + .map_err(|e| TxRuntimeError::MemoryError(Box::new(e)))?; + tx_add_gas(env, gas)?; + + tracing::debug!("tx_rev_iter_prefix {}, prefix {}", prefix, prefix_ptr); + + let prefix = + Key::parse(prefix).map_err(TxRuntimeError::StorageDataError)?; + + let storage = unsafe { env.ctx.storage.get() }; + let iterators = unsafe { env.ctx.iterators.get() }; + let (iter, gas) = storage.rev_iter_prefix(&prefix); + tx_add_gas(env, gas)?; + Ok(iterators.insert(iter).id()) +} + /// Storage prefix iterator next function exposed to the wasm VM Tx environment. /// It will try to read from the write log first and if no entry found then from /// the storage. @@ -703,7 +730,7 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn tx_iter_next( - env: &TxEnv, + env: &TxVmEnv, iter_id: u64, ) -> TxResult where @@ -782,7 +809,7 @@ where /// Storage write function exposed to the wasm VM Tx environment. The given /// key/value will be written to the write log. pub fn tx_write( - env: &TxEnv, + env: &TxVmEnv, key_ptr: u64, key_len: u64, val_ptr: u64, @@ -808,6 +835,9 @@ where tracing::debug!("tx_update {}, {:?}", key, value); let key = Key::parse(key).map_err(TxRuntimeError::StorageDataError)?; + if key.is_validity_predicate().is_some() { + tx_validate_vp_code(env, &value)?; + } check_address_existence(env, &key)?; @@ -823,7 +853,7 @@ where /// given key/value will be written only to the write log. It will be never /// written to the storage. 
pub fn tx_write_temp( - env: &TxEnv, + env: &TxVmEnv, key_ptr: u64, key_len: u64, val_ptr: u64, @@ -861,7 +891,7 @@ where } fn check_address_existence( - env: &TxEnv, + env: &TxVmEnv, key: &Key, ) -> TxResult<()> where @@ -905,7 +935,7 @@ where /// Storage delete function exposed to the wasm VM Tx environment. The given /// key/value will be written as deleted to the write log. pub fn tx_delete( - env: &TxEnv, + env: &TxVmEnv, key_ptr: u64, key_len: u64, ) -> TxResult<()> @@ -939,7 +969,7 @@ where /// Emitting an IBC event function exposed to the wasm VM Tx environment. /// The given IBC event will be set to the write log. pub fn tx_emit_ibc_event( - env: &TxEnv, + env: &TxVmEnv, event_ptr: u64, event_len: u64, ) -> TxResult<()> @@ -967,10 +997,10 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn vp_read_pre( - env: &VpEnv, + env: &VpVmEnv, key_ptr: u64, key_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1018,10 +1048,10 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn vp_read_post( - env: &VpEnv, + env: &VpVmEnv, key_ptr: u64, key_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1064,10 +1094,10 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn vp_read_temp( - env: &VpEnv, + env: &VpVmEnv, key_ptr: u64, key_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1112,9 +1142,9 @@ where /// any) back to the guest, the second step reads the value from cache into a /// pre-allocated buffer with the obtained size. pub fn vp_result_buffer( - env: &VpEnv, + env: &VpVmEnv, result_ptr: u64, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1135,10 +1165,10 @@ where /// Storage `has_key` in prior state (before tx execution) function exposed to /// the wasm VM VP environment. It will try to read from the storage. pub fn vp_has_key_pre( - env: &VpEnv, + env: &VpVmEnv, key_ptr: u64, key_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1166,10 +1196,10 @@ where /// to the wasm VM VP environment. It will try to check the write log first and /// if no entry found then the storage. pub fn vp_has_key_post( - env: &VpEnv, + env: &VpVmEnv, key_ptr: u64, key_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1196,12 +1226,12 @@ where /// Storage prefix iterator function exposed to the wasm VM VP environment. /// It will try to get an iterator from the storage and return the corresponding -/// ID of the iterator. +/// ID of the iterator, ordered by storage keys. pub fn vp_iter_prefix( - env: &VpEnv, + env: &VpVmEnv, prefix_ptr: u64, prefix_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1226,15 +1256,47 @@ where Ok(iterators.insert(iter).id()) } +/// Storage prefix iterator function exposed to the wasm VM VP environment. 
+/// It will try to get an iterator from the storage and return the corresponding +/// ID of the iterator, reverse ordered by storage keys. +pub fn vp_rev_iter_prefix( + env: &VpVmEnv, + prefix_ptr: u64, + prefix_len: u64, +) -> vp_env::EnvResult +where + MEM: VmMemory, + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, + EVAL: VpEvaluator, + CA: WasmCacheAccess, +{ + let (prefix, gas) = env + .memory + .read_string(prefix_ptr, prefix_len as _) + .map_err(|e| vp_env::RuntimeError::MemoryError(Box::new(e)))?; + let gas_meter = unsafe { env.ctx.gas_meter.get() }; + vp_env::add_gas(gas_meter, gas)?; + + let prefix = + Key::parse(prefix).map_err(vp_env::RuntimeError::StorageDataError)?; + tracing::debug!("vp_rev_iter_prefix {}", prefix); + + let storage = unsafe { env.ctx.storage.get() }; + let iter = vp_env::rev_iter_prefix(gas_meter, storage, &prefix)?; + let iterators = unsafe { env.ctx.iterators.get() }; + Ok(iterators.insert(iter).id()) +} + /// Storage prefix iterator for prior state (before tx execution) function /// exposed to the wasm VM VP environment. It will try to read from the storage. /// /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn vp_iter_pre_next( - env: &VpEnv, + env: &VpVmEnv, iter_id: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1272,9 +1334,9 @@ where /// Returns `-1` when the key is not present, or the length of the data when /// the key is present (the length may be `0`). pub fn vp_iter_post_next( - env: &VpEnv, + env: &VpVmEnv, iter_id: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1309,7 +1371,7 @@ where /// Verifier insertion function exposed to the wasm VM Tx environment. pub fn tx_insert_verifier( - env: &TxEnv, + env: &TxVmEnv, addr_ptr: u64, addr_len: u64, ) -> TxResult<()> @@ -1336,7 +1398,7 @@ where /// Update a validity predicate function exposed to the wasm VM Tx environment pub fn tx_update_validity_predicate( - env: &TxEnv, + env: &TxVmEnv, addr_ptr: u64, addr_len: u64, code_ptr: u64, @@ -1364,8 +1426,7 @@ where .map_err(|e| TxRuntimeError::MemoryError(Box::new(e)))?; tx_add_gas(env, gas)?; - tx_add_gas(env, code.len() as u64 * WASM_VALIDATION_GAS_PER_BYTE)?; - validate_untrusted_wasm(&code).map_err(TxRuntimeError::UpdateVpInvalid)?; + tx_validate_vp_code(env, &code)?; let write_log = unsafe { env.ctx.write_log.get() }; let (gas, _size_diff) = write_log @@ -1377,7 +1438,7 @@ where /// Initialize a new account established address. pub fn tx_init_account( - env: &TxEnv, + env: &TxVmEnv, code_ptr: u64, code_len: u64, result_ptr: u64, @@ -1394,9 +1455,7 @@ where .map_err(|e| TxRuntimeError::MemoryError(Box::new(e)))?; tx_add_gas(env, gas)?; - tx_add_gas(env, code.len() as u64 * WASM_VALIDATION_GAS_PER_BYTE)?; - validate_untrusted_wasm(&code) - .map_err(TxRuntimeError::InitAccountInvalidVpWasm)?; + tx_validate_vp_code(env, &code)?; #[cfg(feature = "wasm-runtime")] { let vp_wasm_cache = unsafe { env.ctx.vp_wasm_cache.get() }; @@ -1420,7 +1479,7 @@ where /// Getting the chain ID function exposed to the wasm VM Tx environment. pub fn tx_get_chain_id( - env: &TxEnv, + env: &TxVmEnv, result_ptr: u64, ) -> TxResult<()> where @@ -1443,7 +1502,7 @@ where /// environment. The height is that of the block to which the current /// transaction is being applied. 
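// ---------------------------------------------------------------------------
// Editor's sketch (not part of this patch): the ordering contract documented
// for `tx_iter_prefix` vs the new `tx_rev_iter_prefix` host functions, shown
// with a std BTreeMap standing in for the real storage. Matching keys come
// back in ascending key order from the forward iterator and in descending
// order from the reverse one. The key strings here are illustrative only.
// ---------------------------------------------------------------------------
use std::collections::BTreeMap;

fn main() {
    let mut store = BTreeMap::new();
    store.insert("balance/alice".to_string(), 1u64);
    store.insert("balance/bob".to_string(), 2);
    store.insert("nonce/alice".to_string(), 7);

    let prefix = "balance/";
    // Forward prefix iteration: matching keys in ascending order
    let forward: Vec<String> = store
        .range(prefix.to_string()..)
        .take_while(|(k, _)| k.starts_with(prefix))
        .map(|(k, _)| k.clone())
        .collect();
    // Reverse prefix iteration: the same keys, descending order
    let reverse: Vec<String> = forward.iter().rev().cloned().collect();

    assert_eq!(forward, vec!["balance/alice", "balance/bob"]);
    assert_eq!(reverse, vec!["balance/bob", "balance/alice"]);
}
// ---------------------------------------------------------------------------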
pub fn tx_get_block_height( - env: &TxEnv, + env: &TxVmEnv, ) -> TxResult where MEM: VmMemory, @@ -1460,7 +1519,7 @@ where /// Getting the block hash function exposed to the wasm VM Tx environment. The /// hash is that of the block to which the current transaction is being applied. pub fn tx_get_block_hash( - env: &TxEnv, + env: &TxVmEnv, result_ptr: u64, ) -> TxResult<()> where @@ -1483,7 +1542,7 @@ where /// environment. The epoch is that of the block to which the current /// transaction is being applied. pub fn tx_get_block_epoch( - env: &TxEnv, + env: &TxVmEnv, ) -> TxResult where MEM: VmMemory, @@ -1499,9 +1558,9 @@ where /// Getting the chain ID function exposed to the wasm VM VP environment. pub fn vp_get_chain_id( - env: &VpEnv, + env: &VpVmEnv, result_ptr: u64, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1523,8 +1582,8 @@ where /// environment. The height is that of the block to which the current /// transaction is being applied. pub fn vp_get_block_height( - env: &VpEnv, -) -> vp_env::Result + env: &VpVmEnv, +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1542,7 +1601,7 @@ where /// environment. The time is that of the block header to which the current /// transaction is being applied. pub fn tx_get_block_time( - env: &TxEnv, + env: &TxVmEnv, ) -> TxResult where MEM: VmMemory, @@ -1577,9 +1636,9 @@ where /// Getting the block hash function exposed to the wasm VM VP environment. The /// hash is that of the block to which the current transaction is being applied. pub fn vp_get_block_hash( - env: &VpEnv, + env: &VpVmEnv, result_ptr: u64, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1599,9 +1658,9 @@ where /// Getting the transaction hash function exposed to the wasm VM VP environment. pub fn vp_get_tx_code_hash( - env: &VpEnv, + env: &VpVmEnv, result_ptr: u64, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1623,8 +1682,8 @@ where /// environment. The epoch is that of the block to which the current /// transaction is being applied. pub fn vp_get_block_epoch( - env: &VpEnv, -) -> vp_env::Result + env: &VpVmEnv, +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1640,12 +1699,12 @@ where /// Verify a transaction signature. pub fn vp_verify_tx_signature( - env: &VpEnv, + env: &VpVmEnv, pk_ptr: u64, pk_len: u64, sig_ptr: u64, sig_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1679,7 +1738,7 @@ where /// printed at the [`tracing::Level::INFO`]. This function is for development /// only. pub fn tx_log_string( - env: &TxEnv, + env: &TxVmEnv, str_ptr: u64, str_len: u64, ) -> TxResult<()> @@ -1697,14 +1756,29 @@ where Ok(()) } +/// Validate a VP WASM code in a tx environment. +fn tx_validate_vp_code( + env: &TxVmEnv, + code: &[u8], +) -> TxResult<()> +where + MEM: VmMemory, + DB: storage::DB + for<'iter> storage::DBIter<'iter>, + H: StorageHasher, + CA: WasmCacheAccess, +{ + tx_add_gas(env, code.len() as u64 * WASM_VALIDATION_GAS_PER_BYTE)?; + validate_untrusted_wasm(code).map_err(TxRuntimeError::InvalidVpCode) +} + /// Evaluate a validity predicate with the given input data. 
pub fn vp_eval( - env: &VpEnv<'static, MEM, DB, H, EVAL, CA>, + env: &VpVmEnv<'static, MEM, DB, H, EVAL, CA>, vp_code_ptr: u64, vp_code_len: u64, input_data_ptr: u64, input_data_len: u64, -) -> vp_env::Result +) -> vp_env::EnvResult where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1735,10 +1809,10 @@ where /// printed at the [`tracing::Level::INFO`]. This function is for development /// only. pub fn vp_log_string( - env: &VpEnv, + env: &VpVmEnv, str_ptr: u64, str_len: u64, -) -> vp_env::Result<()> +) -> vp_env::EnvResult<()> where MEM: VmMemory, DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -1775,13 +1849,13 @@ pub mod testing { result_buffer: &mut Option>, #[cfg(feature = "wasm-runtime")] vp_wasm_cache: &mut VpCache, #[cfg(feature = "wasm-runtime")] tx_wasm_cache: &mut TxCache, - ) -> TxEnv<'static, NativeMemory, DB, H, CA> + ) -> TxVmEnv<'static, NativeMemory, DB, H, CA> where DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, CA: WasmCacheAccess, { - TxEnv::new( + TxVmEnv::new( NativeMemory::default(), storage, write_log, @@ -1810,14 +1884,14 @@ pub mod testing { keys_changed: &BTreeSet, eval_runner: &EVAL, #[cfg(feature = "wasm-runtime")] vp_wasm_cache: &mut VpCache, - ) -> VpEnv<'static, NativeMemory, DB, H, EVAL, CA> + ) -> VpVmEnv<'static, NativeMemory, DB, H, EVAL, CA> where DB: 'static + storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, EVAL: VpEvaluator, CA: WasmCacheAccess, { - VpEnv::new( + VpVmEnv::new( NativeMemory::default(), address, storage, diff --git a/shared/src/vm/mod.rs b/shared/src/vm/mod.rs index 88c8803836..2e14666f81 100644 --- a/shared/src/vm/mod.rs +++ b/shared/src/vm/mod.rs @@ -1,5 +1,4 @@ -//! Virtual machine modules for running transactions, validity predicates, -//! matchmaker and matchmaker's filter. +//! Virtual machine modules for running transactions and validity predicates. use std::ffi::c_void; use std::marker::PhantomData; diff --git a/shared/src/vm/types.rs b/shared/src/vm/types.rs index c2a9ef11cb..480190ad08 100644 --- a/shared/src/vm/types.rs +++ b/shared/src/vm/types.rs @@ -29,9 +29,6 @@ pub struct VpInput<'a> { pub verifiers: &'a BTreeSet
, } -/// Input for matchmaker wasm module call -pub type MatchmakerInput = Vec; - /// Key-value pair represents data from account's subspace #[derive(Clone, Debug, BorshSerialize, BorshDeserialize)] pub struct KeyVal { diff --git a/shared/src/vm/wasm/host_env.rs b/shared/src/vm/wasm/host_env.rs index 3736c8a295..769613d42f 100644 --- a/shared/src/vm/wasm/host_env.rs +++ b/shared/src/vm/wasm/host_env.rs @@ -10,11 +10,11 @@ use wasmer::{ use crate::ledger::storage::traits::StorageHasher; use crate::ledger::storage::{self}; -use crate::vm::host_env::{TxEnv, VpEnv, VpEvaluator}; +use crate::vm::host_env::{TxVmEnv, VpEvaluator, VpVmEnv}; use crate::vm::wasm::memory::WasmMemory; use crate::vm::{host_env, WasmCacheAccess}; -impl WasmerEnv for TxEnv<'_, WasmMemory, DB, H, CA> +impl WasmerEnv for TxVmEnv<'_, WasmMemory, DB, H, CA> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -28,7 +28,7 @@ where } } -impl WasmerEnv for VpEnv<'_, WasmMemory, DB, H, EVAL, CA> +impl WasmerEnv for VpVmEnv<'_, WasmMemory, DB, H, EVAL, CA> where DB: storage::DB + for<'iter> storage::DBIter<'iter>, H: StorageHasher, @@ -49,7 +49,7 @@ where pub fn tx_imports( wasm_store: &Store, initial_memory: Memory, - env: TxEnv<'static, WasmMemory, DB, H, CA>, + env: TxVmEnv<'static, WasmMemory, DB, H, CA>, ) -> ImportObject where DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -68,6 +68,7 @@ where "anoma_tx_write_temp" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_write_temp), "anoma_tx_delete" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_delete), "anoma_tx_iter_prefix" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_iter_prefix), + "anoma_tx_rev_iter_prefix" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_rev_iter_prefix), "anoma_tx_iter_next" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_iter_next), "anoma_tx_insert_verifier" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_insert_verifier), "anoma_tx_update_validity_predicate" => Function::new_native_with_env(wasm_store, env.clone(), host_env::tx_update_validity_predicate), @@ -88,7 +89,7 @@ where pub fn vp_imports( wasm_store: &Store, initial_memory: Memory, - env: VpEnv<'static, WasmMemory, DB, H, EVAL, CA>, + env: VpVmEnv<'static, WasmMemory, DB, H, EVAL, CA>, ) -> ImportObject where DB: storage::DB + for<'iter> storage::DBIter<'iter>, @@ -108,6 +109,7 @@ where "anoma_vp_has_key_pre" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_has_key_pre), "anoma_vp_has_key_post" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_has_key_post), "anoma_vp_iter_prefix" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_iter_prefix), + "anoma_vp_rev_iter_prefix" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_rev_iter_prefix), "anoma_vp_iter_pre_next" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_iter_pre_next), "anoma_vp_iter_post_next" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_iter_post_next), "anoma_vp_get_chain_id" => Function::new_native_with_env(wasm_store, env.clone(), host_env::vp_get_chain_id), diff --git a/shared/src/vm/wasm/run.rs b/shared/src/vm/wasm/run.rs index dbfdb4c961..bc2d1b5aae 100644 --- a/shared/src/vm/wasm/run.rs +++ b/shared/src/vm/wasm/run.rs @@ -18,7 +18,7 @@ use crate::proto::Tx; use crate::types::address::Address; use 
crate::types::internal::HostEnvResult; use crate::types::storage::Key; -use crate::vm::host_env::{TxEnv, VpCtx, VpEnv, VpEvaluator}; +use crate::vm::host_env::{TxVmEnv, VpCtx, VpEvaluator, VpVmEnv}; use crate::vm::prefix_iter::PrefixIterators; use crate::vm::types::VpInput; use crate::vm::wasm::host_env::{tx_imports, vp_imports}; @@ -95,7 +95,7 @@ where let mut verifiers = BTreeSet::new(); let mut result_buffer: Option> = None; - let env = TxEnv::new( + let env = TxVmEnv::new( WasmMemory::default(), storage, write_log, @@ -190,7 +190,7 @@ where cache_access: PhantomData, }; - let env = VpEnv::new( + let env = VpVmEnv::new( WasmMemory::default(), address, storage, @@ -345,7 +345,7 @@ where let keys_changed = unsafe { ctx.keys_changed.get() }; let verifiers = unsafe { ctx.verifiers.get() }; let vp_wasm_cache = unsafe { ctx.vp_wasm_cache.get() }; - let env = VpEnv { + let env = VpVmEnv { memory: WasmMemory::default(), ctx, }; diff --git a/tests/Cargo.toml b/tests/Cargo.toml index 232f95cfed..76377e3df7 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -5,15 +5,16 @@ edition = "2021" license = "GPL-3.0" name = "namada_tests" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = ["wasm-runtime"] wasm-runtime = ["namada/wasm-runtime"] [dependencies] -namada = {path = "../shared", features = ["abciplus", "testing", "ibc-mocks"]} -namada_vm_env = {path = "../vm_env"} +namada = {path = "../shared", features = ["testing", "ibc-mocks"]} +namada_vp_prelude = {path = "../vp_prelude"} +namada_tx_prelude = {path = "../tx_prelude"} chrono = {version = "0.4.22", default-features = false, features = ["clock", "std"]} concat-idents = "1.1.2" prost = "0.9.0" @@ -30,15 +31,14 @@ namada_apps = {path = "../apps", default-features = false, features = ["abciplus assert_cmd = "1.0.7" borsh = "0.9.1" color-eyre = "0.5.11" +data-encoding = "2.3.2" # NOTE: enable "print" feature to see output from builds ran by e2e tests escargot = {version = "0.5.7"} # , features = ["print"]} -expectrl = {version = "=0.5.2"} +expectrl = "0.6.0" eyre = "0.6.5" file-serve = "0.2.0" fs_extra = "1.2.0" -hex = "0.4.3" itertools = "0.10.0" -libp2p = "0.38.0" pretty_assertions = "0.7.2" # A fork with state machine testing proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"} diff --git a/tests/proptest-regressions/native_vp/pos.txt b/tests/proptest-regressions/native_vp/pos.txt new file mode 100644 index 0000000000..ad157e817b --- /dev/null +++ b/tests/proptest-regressions/native_vp/pos.txt @@ -0,0 +1,2 @@ +cc 65720acc67508ccd2fefc1ca42477075ae53a7d1e3c8f31324cfb8f06587457e +cc 45b2dd2ed9619ceef6135ee6ca34406621c8a6429ffa153bbda3ce79dd4e006c \ No newline at end of file diff --git a/tests/proptest-regressions/storage_api/collections/lazy_map.txt b/tests/proptest-regressions/storage_api/collections/lazy_map.txt new file mode 100644 index 0000000000..2de7510923 --- /dev/null +++ b/tests/proptest-regressions/storage_api/collections/lazy_map.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. 
+cc 59b8eaaf5d8e03e58b346ef229a2487f68fea488197420f150682f7275ce2b83 # shrinks to (initial_state, transitions) = (AbstractLazyMapState { valid_transitions: [], committed_transitions: [] }, [Insert(11178241982156558453, TestVal { x: 9618691367534591266, y: true }), CommitTx, Update(11178241982156558453, TestVal { x: 2635377083098935189, y: false }), Update(11178241982156558453, TestVal { x: 11485387163946255361, y: false }), Insert(4380901092919801530, TestVal { x: 17235291421018840542, y: false }), Update(11178241982156558453, TestVal { x: 1936190700145956620, y: false }), Update(11178241982156558453, TestVal { x: 6934621224353358508, y: false }), Update(11178241982156558453, TestVal { x: 16175036327810390362, y: true }), Remove(5606457884982633480), Insert(7124206407862523505, TestVal { x: 5513772825695605555, y: true }), CommitTxAndBlock, CommitTx, Insert(13347045100814804679, TestVal { x: 5157295776286367034, y: false }), Update(7124206407862523505, TestVal { x: 1989909525753197955, y: false }), Update(4380901092919801530, TestVal { x: 13085578877588425331, y: false }), Update(7124206407862523505, TestVal { x: 1620781139263176467, y: true }), Insert(5806457332157050619, TestVal { x: 14632354209749334932, y: true }), Remove(1613213961397167063), Update(7124206407862523505, TestVal { x: 3848976302483310370, y: true }), Update(4380901092919801530, TestVal { x: 15281186775251770467, y: false }), Remove(5303306623647571548), Insert(5905425607805327902, TestVal { x: 1274794101048822414, y: false }), Insert(2305446651611241243, TestVal { x: 7872403441503057017, y: true }), Insert(2843165193114615911, TestVal { x: 13698490566286768452, y: false }), Insert(3364298091459048760, TestVal { x: 8891279000465212397, y: true }), CommitTx, Insert(17278527568142155478, TestVal { x: 8166151895050476136, y: false }), Remove(9206713523174765253), Remove(1148985045479283759), Insert(13346103305566843535, TestVal { x: 13148026974798633058, y: true }), Remove(17185699086139524651), CommitTx, Update(7124206407862523505, TestVal { x: 3047872255943216792, y: false }), CommitTxAndBlock, CommitTxAndBlock, Remove(4672009405538026945), Update(5905425607805327902, TestVal { x: 6635343936644805461, y: false }), Insert(14100441716981493843, TestVal { x: 8068697312326956479, y: true }), Insert(8370580326875672309, TestVal { x: 18416630552728813406, y: false }), Update(2305446651611241243, TestVal { x: 3777718192999015176, y: false }), Remove(1532142753559370584), Remove(10097030807802775125), Insert(10080356901530935857, TestVal { x: 17171047520093964037, y: false }), Update(3364298091459048760, TestVal { x: 702372485798608773, y: true }), Insert(5504969092734638033, TestVal { x: 314752460808087203, y: true }), Remove(5486040497128339175), Insert(7884678026881625058, TestVal { x: 4313610278903495077, y: true }), CommitTx, Insert(11228024342874184864, TestVal { x: 428512502841968552, y: false }), Insert(4684666745142518471, TestVal { x: 13122515680485564107, y: true }), Remove(14243063045921130600), Remove(4530767959521683042), Insert(10236349778753659715, TestVal { x: 3138294567956031715, y: true }), Update(2305446651611241243, TestVal { x: 8133236604817109805, y: false }), Update(2843165193114615911, TestVal { x: 12001998927296899868, y: false }), CommitTxAndBlock, CommitTx, CommitTxAndBlock]) diff --git a/tests/proptest-regressions/storage_api/collections/lazy_vec.txt b/tests/proptest-regressions/storage_api/collections/lazy_vec.txt new file mode 100644 index 0000000000..97a16dcbeb --- /dev/null +++ 
b/tests/proptest-regressions/storage_api/collections/lazy_vec.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 4330a283e32b5ff3f38d0af2298e1e98c30b1901c1027b572070a1af3356688e # shrinks to (initial_state, transitions) = (AbstractLazyVecState { valid_transitions: [], committed_transitions: [] }, [Push(TestVecItem { x: 15352583996758053781, y: true }), Pop, CommitTx, Push(TestVecItem { x: 6904067244182623445, y: false }), CommitTx, Pop, Push(TestVecItem { x: 759762287021483883, y: true }), Push(TestVecItem { x: 7885704082671389345, y: true }), Pop, Pop, Push(TestVecItem { x: 2762344561419437403, y: false }), Push(TestVecItem { x: 11448034977049028254, y: false }), Update { index: 0, value: TestVecItem { x: 7097339541298715775, y: false } }, Pop, Pop, Push(TestVecItem { x: 457884036257686887, y: true }), CommitTx, Push(TestVecItem { x: 17719281119971095810, y: true }), CommitTx, Push(TestVecItem { x: 4612681906563857058, y: false }), CommitTx, CommitTx, Pop, CommitTx, Pop, Push(TestVecItem { x: 4269537158299505726, y: false }), CommitTx, Pop, Pop, CommitTx, CommitTx, CommitTx, CommitTx, Push(TestVecItem { x: 9020889554694833528, y: true }), Push(TestVecItem { x: 4022797489860699620, y: false }), Update { index: 0, value: TestVecItem { x: 6485081152860611495, y: true } }, Pop, CommitTx, Push(TestVecItem { x: 14470031031894733310, y: false }), Push(TestVecItem { x: 1113274973965556867, y: true }), Push(TestVecItem { x: 4122902042678339346, y: false }), Push(TestVecItem { x: 9672639635189564637, y: true }), Pop, Pop, Pop, CommitTx, Update { index: 0, value: TestVecItem { x: 6372193991838429158, y: false } }, Push(TestVecItem { x: 15140852824102579010, y: false }), Pop, Pop, Pop, Push(TestVecItem { x: 4012218522073776592, y: false }), Push(TestVecItem { x: 10637893847792386454, y: true }), Push(TestVecItem { x: 3357788278949652885, y: false }), CommitTx, CommitTx, Pop, Pop, CommitTx, Pop, Push(TestVecItem { x: 11768518086398350214, y: true }), Push(TestVecItem { x: 4361685178396183644, y: true }), Pop, CommitTx, Push(TestVecItem { x: 2450907664540456425, y: false }), Push(TestVecItem { x: 18184919885943118586, y: true }), Update { index: 1, value: TestVecItem { x: 10611906658537706503, y: false } }, Push(TestVecItem { x: 4887827541279511396, y: false }), Update { index: 0, value: TestVecItem { x: 13021774003761931172, y: false } }, Push(TestVecItem { x: 3644118228573898014, y: false }), CommitTx, Update { index: 0, value: TestVecItem { x: 1276840798381751183, y: false } }, Pop, Pop]) diff --git a/tests/proptest-regressions/storage_api/collections/nested_lazy_map.txt b/tests/proptest-regressions/storage_api/collections/nested_lazy_map.txt new file mode 100644 index 0000000000..d587a9680e --- /dev/null +++ b/tests/proptest-regressions/storage_api/collections/nested_lazy_map.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. 
+cc b5ce7502439712f95a4b50de0d5455e0a6788cc95dbd535e749d5717da0ee8e1 # shrinks to (initial_state, transitions) = (AbstractLazyMapState { valid_transitions: [], committed_transitions: [] }, [Insert((22253647846329582, -2060910714, -85), TestVal { x: 16862967849328560500, y: true })]) diff --git a/tests/src/e2e.rs b/tests/src/e2e.rs index a9eb2b2cf5..0afc343098 100644 --- a/tests/src/e2e.rs +++ b/tests/src/e2e.rs @@ -12,7 +12,6 @@ //! `ANOMA_E2E_KEEP_TEMP=true`. pub mod eth_bridge_tests; -pub mod gossip_tests; pub mod helpers; pub mod ledger_tests; pub mod setup; diff --git a/tests/src/e2e/gossip_tests.rs b/tests/src/e2e/gossip_tests.rs deleted file mode 100644 index 5da19c0758..0000000000 --- a/tests/src/e2e/gossip_tests.rs +++ /dev/null @@ -1,348 +0,0 @@ -//! By default, these tests will run in release mode. This can be disabled -//! by setting environment variable `ANOMA_E2E_DEBUG=true`. For debugging, -//! you'll typically also want to set `RUST_BACKTRACE=1`, e.g.: -//! -//! ```ignore,shell -//! ANOMA_E2E_DEBUG=true RUST_BACKTRACE=1 cargo test e2e::gossip_tests -- --test-threads=1 --nocapture -//! ``` -//! -//! To keep the temporary files created by a test, use env var -//! `ANOMA_E2E_KEEP_TEMP=true`. - -use std::env; -use std::fs::OpenOptions; -use std::path::PathBuf; - -use color_eyre::eyre::Result; -use escargot::CargoBuild; -use serde_json::json; -use setup::constants::*; - -use super::setup::ENV_VAR_DEBUG; -use crate::e2e::helpers::{ - find_address, get_actor_rpc, get_gossiper_mm_server, -}; -use crate::e2e::setup::{self, Bin, Who}; -use crate::{run, run_as}; - -/// Test that when we "run-gossip" a peer with no seeds should fail -/// bootstrapping kademlia. A peer with a seed should be able to -/// bootstrap kademia and connect to the other peer. -/// In this test we: -/// 1. Check that a gossip node can start and stop cleanly -/// 2. Check that two peers connected to the same seed node discover each other -#[test] -#[ignore] // this is not currently being developed, run with `cargo test -- --ignored` -fn run_gossip() -> Result<()> { - let test = - setup::network(|genesis| setup::add_validators(2, genesis), None)?; - - // 1. Start the first gossip node and then stop it - let mut node_0 = - run_as!(test, Who::Validator(0), Bin::Node, &["gossip"], Some(40))?; - node_0.send_control('c')?; - node_0.exp_eof()?; - drop(node_0); - - // 2. Check that two peers connected to the same seed node discover each - // other. Start the first gossip node again (the seed node). 
- let mut node_0 = - run_as!(test, Who::Validator(0), Bin::Node, &["gossip"], Some(40))?; - let (_unread, matched) = node_0.exp_regex(r"Peer id: PeerId\(.*\)")?; - let node_0_peer_id = matched - .trim() - .rsplit_once('\"') - .unwrap() - .0 - .rsplit_once('\"') - .unwrap() - .1; - let _bg_node_0 = node_0.background(); - - // Start the second gossip node (a peer node) - let mut node_1 = - run_as!(test, Who::Validator(1), Bin::Node, &["gossip"], Some(40))?; - - let (_unread, matched) = node_1.exp_regex(r"Peer id: PeerId\(.*\)")?; - let node_1_peer_id = matched - .trim() - .rsplit_once('\"') - .unwrap() - .0 - .rsplit_once('\"') - .unwrap() - .1; - node_1.exp_string(&format!( - "Connect to a new peer: PeerId(\"{}\")", - node_0_peer_id - ))?; - let _bg_node_1 = node_1.background(); - - // Start the third gossip node (another peer node) - let mut node_2 = - run_as!(test, Who::Validator(2), Bin::Node, &["gossip"], Some(20))?; - // The third node should connect to node 1 via Identify and Kademlia peer - // discovery protocol - node_2.exp_string(&format!( - "Connect to a new peer: PeerId(\"{}\")", - node_1_peer_id - ))?; - node_2.exp_string(&format!("Identified Peer {}", node_1_peer_id))?; - node_2 - .exp_string(&format!("Routing updated peer ID: {}", node_1_peer_id))?; - - Ok(()) -} - -/// This test runs a ledger node and 2 gossip nodes. It then crafts 3 intents -/// and sends them to the matchmaker. The matchmaker should be able to match -/// them into a transfer transaction and submit it to the ledger. -#[test] -#[ignore] // this is not currently being developed, run with `cargo test -- --ignored` -fn match_intents() -> Result<()> { - let test = setup::single_node_net()?; - - // Make sure that the default matchmaker is built - println!("Building the matchmaker \"mm_token_exch\" implementation..."); - let run_debug = match env::var(ENV_VAR_DEBUG) { - Ok(val) => val.to_ascii_lowercase() != "false", - _ => false, - }; - let manifest_path = test - .working_dir - .join("matchmaker") - .join("mm_token_exch") - .join("Cargo.toml"); - let cmd = CargoBuild::new().manifest_path(manifest_path); - let cmd = if run_debug { cmd } else { cmd.release() }; - let msgs = cmd.exec().unwrap(); - for msg in msgs { - msg.unwrap(); - } - println!("Done building the matchmaker."); - - let mut ledger = - run_as!(test, Who::Validator(0), Bin::Node, &["ledger"], Some(40))?; - ledger.exp_string("Anoma ledger node started")?; - ledger.exp_string("No state could be found")?; - // Wait to commit a block - ledger.exp_regex(r"Committed block hash.*, height: [0-9]+")?; - let bg_ledger = ledger.background(); - - let intent_a_path_input = test.test_dir.path().join("intent.A.data"); - let intent_b_path_input = test.test_dir.path().join("intent.B.data"); - let intent_c_path_input = test.test_dir.path().join("intent.C.data"); - - let albert = find_address(&test, ALBERT)?; - let bertha = find_address(&test, BERTHA)?; - let christel = find_address(&test, CHRISTEL)?; - let xan = find_address(&test, XAN)?; - let btc = find_address(&test, BTC)?; - let eth = find_address(&test, ETH)?; - let intent_a_json = json!([ - { - "key": bertha, - "addr": bertha, - "min_buy": "100.0", - "max_sell": "70", - "token_buy": xan, - "token_sell": btc, - "rate_min": "2", - "vp_path": test.working_dir.join(VP_ALWAYS_TRUE_WASM).to_string_lossy().into_owned(), - } - ]); - - let intent_b_json = json!([ - { - "key": albert, - "addr": albert, - "min_buy": "50", - "max_sell": "300", - "token_buy": btc, - "token_sell": eth, - "rate_min": "0.7" - } - ]); - let 
intent_c_json = json!([ - { - "key": christel, - "addr": christel, - "min_buy": "20", - "max_sell": "200", - "token_buy": eth, - "token_sell": xan, - "rate_min": "0.5" - } - ]); - generate_intent_json(intent_a_path_input.clone(), intent_a_json); - generate_intent_json(intent_b_path_input.clone(), intent_b_json); - generate_intent_json(intent_c_path_input.clone(), intent_c_json); - - let validator_one_rpc = get_actor_rpc(&test, &Who::Validator(0)); - let validator_one_gossiper = - get_gossiper_mm_server(&test, &Who::Validator(0)); - - // The RPC port starts at 27660 (see `setup::network`) - let rpc_port = 27660; - let rpc_address = format!("127.0.0.1:{}", rpc_port); - - // Start intent gossiper node - let mut gossiper = run_as!( - test, - Who::Validator(0), - Bin::Node, - &["gossip", "--rpc", &rpc_address], - Some(20) - )?; - - // Wait gossip to start - gossiper.exp_string(&format!("RPC started at {}", rpc_address))?; - let _bg_gossiper = gossiper.background(); - - // Start matchmaker - let mut matchmaker = run_as!( - test, - Who::Validator(0), - Bin::Node, - &[ - "matchmaker", - "--source", - "matchmaker", - "--signing-key", - "matchmaker-key", - "--ledger-address", - &validator_one_rpc, - "--intent-gossiper", - &validator_one_gossiper, - ], - Some(40) - )?; - - // Wait for the matchmaker to start - matchmaker.exp_string("Connected to the server")?; - let bg_matchmaker = matchmaker.background(); - - let rpc_address = format!("http://{}", rpc_address); - // Send intent A - let mut session_send_intent_a = run!( - test, - Bin::Client, - &[ - "intent", - "--node", - &rpc_address, - "--data-path", - intent_a_path_input.to_str().unwrap(), - "--topic", - "asset_v1", - "--signing-key", - BERTHA_KEY, - "--ledger-address", - &validator_one_rpc - ], - Some(40), - )?; - - // means it sent it correctly but not able to gossip it (which is - // correct since there is only 1 node) - session_send_intent_a.exp_string( - "Failed to publish intent in gossiper: InsufficientPeers", - )?; - drop(session_send_intent_a); - - let mut matchmaker = bg_matchmaker.foreground(); - matchmaker.exp_string("trying to match new intent")?; - let bg_matchmaker = matchmaker.background(); - - // Send intent B - let mut session_send_intent_b = run!( - test, - Bin::Client, - &[ - "intent", - "--node", - &rpc_address, - "--data-path", - intent_b_path_input.to_str().unwrap(), - "--topic", - "asset_v1", - "--signing-key", - ALBERT_KEY, - "--ledger-address", - &validator_one_rpc - ], - Some(40), - )?; - - // means it sent it correctly but not able to gossip it (which is - // correct since there is only 1 node) - session_send_intent_b.exp_string( - "Failed to publish intent in gossiper: InsufficientPeers", - )?; - drop(session_send_intent_b); - - let mut matchmaker = bg_matchmaker.foreground(); - matchmaker.exp_string("trying to match new intent")?; - let bg_matchmaker = matchmaker.background(); - - // Send intent C - let mut session_send_intent_c = run!( - test, - Bin::Client, - &[ - "intent", - "--node", - &rpc_address, - "--data-path", - intent_c_path_input.to_str().unwrap(), - "--topic", - "asset_v1", - "--signing-key", - CHRISTEL_KEY, - "--ledger-address", - &validator_one_rpc - ], - Some(40), - )?; - - // means it sent it correctly but not able to gossip it (which is - // correct since there is only 1 node) - session_send_intent_c.exp_string( - "Failed to publish intent in gossiper: InsufficientPeers", - )?; - drop(session_send_intent_c); - - // check that the transfers transactions are correct - let mut matchmaker = 
bg_matchmaker.foreground(); - matchmaker.exp_string(&format!( - "crafting transfer: {}, {}, 70", - bertha, albert - ))?; - matchmaker.exp_string(&format!( - "crafting transfer: {}, {}, 200", - christel, bertha - ))?; - matchmaker.exp_string(&format!( - "crafting transfer: {}, {}, 100", - albert, christel - ))?; - - // check that the all VPs accept the transaction - let mut ledger = bg_ledger.foreground(); - ledger.exp_string("all VPs accepted transaction")?; - - Ok(()) -} - -fn generate_intent_json( - intent_path: PathBuf, - exchange_json: serde_json::Value, -) { - let intent_writer = OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(intent_path) - .unwrap(); - serde_json::to_writer(intent_writer, &exchange_json).unwrap(); -} diff --git a/tests/src/e2e/helpers.rs b/tests/src/e2e/helpers.rs index cc0c45cf8c..705c822760 100644 --- a/tests/src/e2e/helpers.rs +++ b/tests/src/e2e/helpers.rs @@ -42,7 +42,7 @@ pub fn find_address(test: &Test, alias: impl AsRef) -> Result
{ Ok(address) } -/// Find the address of the intent gossiper node's RPC endpoint. +/// Find the address of the node's RPC endpoint. pub fn get_actor_rpc(test: &Test, who: &Who) -> String { let base_dir = test.get_base_dir(who); let tendermint_mode = match who { @@ -54,18 +54,6 @@ pub fn get_actor_rpc(test: &Test, who: &Who) -> String { config.ledger.tendermint.rpc_address.to_string() } -/// Find the address of the intent gossiper node's matchmakers server. -pub fn get_gossiper_mm_server(test: &Test, who: &Who) -> String { - let base_dir = test.get_base_dir(who); - let tendermint_mode = match who { - Who::NonValidator => TendermintMode::Full, - Who::Validator(_) => TendermintMode::Validator, - }; - let config = - Config::load(&base_dir, &test.net.chain_id, Some(tendermint_mode)); - config.intent_gossiper.matchmakers_server_addr.to_string() -} - /// Find the address of an account by its alias from the wallet #[allow(dead_code)] pub fn find_keypair( diff --git a/tests/src/e2e/ledger_tests.rs b/tests/src/e2e/ledger_tests.rs index b4dca67fbe..45bf8392ae 100644 --- a/tests/src/e2e/ledger_tests.rs +++ b/tests/src/e2e/ledger_tests.rs @@ -9,14 +9,13 @@ //! To keep the temporary files created by a test, use env var //! `ANOMA_E2E_KEEP_TEMP=true`. -use std::fs::{self, OpenOptions}; -use std::path::PathBuf; use std::process::Command; use std::sync::Arc; use std::time::{Duration, Instant}; use borsh::BorshSerialize; use color_eyre::eyre::Result; +use data_encoding::HEXLOWER; use namada::types::token; use namada_apps::config::ethereum_bridge; use namada_apps::config::genesis::genesis_config::{ @@ -25,7 +24,7 @@ use namada_apps::config::genesis::genesis_config::{ use serde_json::json; use setup::constants::*; -use super::setup::{disable_eth_fullnode, working_dir}; +use super::setup::{disable_eth_fullnode, get_all_wasms_hashes}; use crate::e2e::helpers::{ find_address, find_voting_power, get_actor_rpc, get_epoch, }; @@ -56,7 +55,7 @@ fn run_ledger() -> Result<()> { let mut ledger = run_as!(test, Who::NonValidator, Bin::Node, args, Some(40))?; ledger.exp_string("Anoma ledger node started")?; - ledger.exp_string("This node is a fullnode")?; + ledger.exp_string("This node is not a validator")?; } Ok(()) @@ -90,12 +89,12 @@ fn test_node_connectivity() -> Result<()> { let mut non_validator = run_as!(test, Who::NonValidator, Bin::Node, args, Some(40))?; non_validator.exp_string("Anoma ledger node started")?; - non_validator.exp_string("This node is a fullnode")?; + non_validator.exp_string("This node is not a validator")?; non_validator.exp_string("Starting RPC HTTP server on")?; let bg_validator_0 = validator_0.background(); let bg_validator_1 = validator_1.background(); - let bg_non_validator = non_validator.background(); + let _bg_non_validator = non_validator.background(); // 2. Submit a valid token transfer tx let validator_one_rpc = get_actor_rpc(&test, &Who::Validator(0)); @@ -123,15 +122,39 @@ fn test_node_connectivity() -> Result<()> { client.exp_string("Transaction is valid.")?; client.assert_success(); - // 3. Check that all the nodes processed the tx with the same result + // 3. 
Check that all the nodes processed the tx and report the same balance + let mut validator_0 = bg_validator_0.foreground(); let mut validator_1 = bg_validator_1.foreground(); - let mut non_validator = bg_non_validator.foreground(); - let expected_result = "all VPs accepted transaction"; + // We cannot check this on non-validator node as it might sync without + // applying the tx itself, but its state should be the same, checked below. validator_0.exp_string(expected_result)?; validator_1.exp_string(expected_result)?; - non_validator.exp_string(expected_result)?; + let _bg_validator_0 = validator_0.background(); + let _bg_validator_1 = validator_1.background(); + + let query_balance_args = |ledger_rpc| { + vec![ + "balance", + "--owner", + ALBERT, + "--token", + XAN, + "--ledger-address", + ledger_rpc, + ] + }; + + let validator_0_rpc = get_actor_rpc(&test, &Who::Validator(0)); + let validator_1_rpc = get_actor_rpc(&test, &Who::Validator(1)); + let non_validator_rpc = get_actor_rpc(&test, &Who::NonValidator); + for ledger_rpc in &[validator_0_rpc, validator_1_rpc, non_validator_rpc] { + let mut client = + run!(test, Bin::Client, query_balance_args(ledger_rpc), Some(40))?; + client.exp_string("XAN: 1000010.1")?; + client.assert_success(); + } Ok(()) } @@ -404,7 +427,7 @@ fn ledger_txs_and_queries() -> Result<()> { &validator_one_rpc, ], // expect hex encoded of borsh encoded bytes - hex::encode(christel_balance.try_to_vec().unwrap()), + HEXLOWER.encode(&christel_balance.try_to_vec().unwrap()), ), ]; for (query_args, expected) in &query_args_and_expected_response { @@ -1031,7 +1054,33 @@ fn ledger_many_txs_in_a_block() -> Result<()> { /// 13. Check governance address funds are 0 #[test] fn proposal_submission() -> Result<()> { - let test = setup::network(|genesis| genesis, None)?; + let working_dir = setup::working_dir(); + + let test = setup::network( + |genesis| { + let parameters = ParametersConfig { + min_num_of_blocks: 1, + min_duration: 1, + max_expected_time_per_block: 1, + vp_whitelist: Some(get_all_wasms_hashes( + &working_dir, + Some("vp_"), + )), + // Enable tx whitelist to test the execution of a + // non-whitelisted tx by governance + tx_whitelist: Some(get_all_wasms_hashes( + &working_dir, + Some("tx_"), + )), + }; + + GenesisConfig { + parameters, + ..genesis + } + }, + None, + )?; disable_eth_fullnode(&test, &test.net.chain_id, &Who::Validator(0)); @@ -1074,8 +1123,6 @@ fn proposal_submission() -> Result<()> { client.assert_success(); // 2. Submit valid proposal - let valid_proposal_json_path = - test.test_dir.path().join("valid_proposal.json"); let proposal_code = wasm_abs_path(TX_PROPOSAL_CODE); let albert = find_address(&test, ALBERT)?; @@ -1093,16 +1140,17 @@ fn proposal_submission() -> Result<()> { "requires": "2" }, "author": albert, - "voting_start_epoch": 6, - "voting_end_epoch": 18, - "grace_epoch": 24, + "voting_start_epoch": 12_u64, + "voting_end_epoch": 24_u64, + "grace_epoch": 30_u64, "proposal_code_path": proposal_code.to_str().unwrap() } ); - - generate_proposal_json( - valid_proposal_json_path.clone(), - valid_proposal_json, + let valid_proposal_json_path = + test.test_dir.path().join("valid_proposal.json"); + generate_proposal_json_file( + valid_proposal_json_path.as_path(), + &valid_proposal_json, ); let validator_one_rpc = get_actor_rpc(&test, &Who::Validator(0)); @@ -1120,7 +1168,6 @@ fn proposal_submission() -> Result<()> { client.assert_success(); // 3. 
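One behaviour note on the encoding change above: replacing the `hex` crate with `data_encoding::HEXLOWER` keeps the expected query output byte-for-byte identical, since both emit lowercase hex digits. A minimal standalone sketch of the equivalence (illustrative only, not part of the test suite):

use data_encoding::HEXLOWER;

fn main() {
    let bytes = [0xde_u8, 0xad, 0xbe, 0xef];
    // HEXLOWER emits the same lowercase digits the old `hex::encode` call
    // produced, so assertions on the encoded Borsh bytes stay unchanged.
    assert_eq!(HEXLOWER.encode(&bytes), "deadbeef");
}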
Query the proposal - let proposal_query_args = vec![ "query-proposal", "--proposal-id", @@ -1165,8 +1212,6 @@ fn proposal_submission() -> Result<()> { // 6. Submit an invalid proposal // proposal is invalid due to voting_end_epoch - voting_start_epoch < 3 - let invalid_proposal_json_path = - test.test_dir.path().join("invalid_proposal.json"); let albert = find_address(&test, ALBERT)?; let invalid_proposal_json = json!( { @@ -1197,14 +1242,16 @@ fn proposal_submission() -> Result<()> { eros.", "requires": "2" }, "author": albert, - "voting_start_epoch": 9999, - "voting_end_epoch": 10000, - "grace_epoch": 10009, + "voting_start_epoch": 9999_u64, + "voting_end_epoch": 10000_u64, + "grace_epoch": 10009_u64, } ); - generate_proposal_json( - invalid_proposal_json_path.clone(), - invalid_proposal_json, + let invalid_proposal_json_path = + test.test_dir.path().join("invalid_proposal.json"); + generate_proposal_json_file( + invalid_proposal_json_path.as_path(), + &invalid_proposal_json, ); let submit_proposal_args = vec![ @@ -1215,8 +1262,12 @@ fn proposal_submission() -> Result<()> { &validator_one_rpc, ]; let mut client = run!(test, Bin::Client, submit_proposal_args, Some(40))?; - client.exp_string("Transaction is invalid.")?; - client.assert_success(); + client.exp_string( + "Invalid proposal end epoch: difference between proposal start and \ + end epoch must be at least 3 and at max 27 and end epoch must be a \ + multiple of 3", + )?; + client.assert_failure(); // 7. Check invalid proposal was not accepted let proposal_query_args = vec![ @@ -1248,7 +1299,7 @@ fn proposal_submission() -> Result<()> { // 9. Send a yay vote from a validator let mut epoch = get_epoch(&test, &validator_one_rpc).unwrap(); - while epoch.0 <= 7 { + while epoch.0 <= 13 { sleep(1); epoch = get_epoch(&test, &validator_one_rpc).unwrap(); } @@ -1316,7 +1367,7 @@ fn proposal_submission() -> Result<()> { // 11. Query the proposal and check the result let mut epoch = get_epoch(&test, &validator_one_rpc).unwrap(); - while epoch.0 <= 19 { + while epoch.0 <= 25 { sleep(1); epoch = get_epoch(&test, &validator_one_rpc).unwrap(); } @@ -1335,7 +1386,7 @@ fn proposal_submission() -> Result<()> { // 12. Wait proposal grace and check proposal author funds let mut epoch = get_epoch(&test, &validator_one_rpc).unwrap(); - while epoch.0 < 26 { + while epoch.0 < 31 { sleep(1); epoch = get_epoch(&test, &validator_one_rpc).unwrap(); } @@ -1378,7 +1429,7 @@ fn proposal_submission() -> Result<()> { let mut client = run!(test, Bin::Client, query_protocol_parameters, Some(30))?; - client.exp_regex(".*Min. proposal grace epoch: 9.*")?; + client.exp_regex(".*Min. proposal grace epochs: 9.*")?; client.assert_success(); Ok(()) @@ -1427,9 +1478,7 @@ fn proposal_offline() -> Result<()> { client.exp_string("Transaction is valid.")?; client.assert_success(); - // 2. Create an offline proposal - let valid_proposal_json_path = - test.test_dir.path().join("valid_proposal.json"); + // 2. 
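The expected error above spells out the governance timing rule the client now enforces before submission. A rough, hypothetical restatement of that check (not the actual client code), for readability:

fn proposal_epochs_are_valid(voting_start_epoch: u64, voting_end_epoch: u64) -> bool {
    let diff = voting_end_epoch.saturating_sub(voting_start_epoch);
    // The start-to-end difference must be at least 3 and at most 27, and the
    // end epoch must be a multiple of 3; the 9999/10000 proposal above fails both.
    (3..=27).contains(&diff) && voting_end_epoch % 3 == 0
}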
Create an offline let albert = find_address(&test, ALBERT)?; let valid_proposal_json = json!( { @@ -1445,14 +1494,16 @@ fn proposal_offline() -> Result<()> { "requires": "2" }, "author": albert, - "voting_start_epoch": 3, - "voting_end_epoch": 6, - "grace_epoch": 6 + "voting_start_epoch": 3_u64, + "voting_end_epoch": 9_u64, + "grace_epoch": 18_u64 } ); - generate_proposal_json( - valid_proposal_json_path.clone(), - valid_proposal_json, + let valid_proposal_json_path = + test.test_dir.path().join("valid_proposal.json"); + generate_proposal_json_file( + valid_proposal_json_path.as_path(), + &valid_proposal_json, ); let validator_one_rpc = get_actor_rpc(&test, &Who::Validator(0)); @@ -1472,17 +1523,17 @@ fn proposal_offline() -> Result<()> { // 3. Generate an offline yay vote let mut epoch = get_epoch(&test, &validator_one_rpc).unwrap(); - while epoch.0 <= 5 { + while epoch.0 <= 2 { sleep(1); epoch = get_epoch(&test, &validator_one_rpc).unwrap(); } - let proposal_path = working_dir().join("proposal"); - let proposal_ref = proposal_path.to_string_lossy(); + let proposal_path = test.test_dir.path().join("proposal"); + let submit_proposal_vote = vec![ "vote-proposal", "--data-path", - &proposal_ref, + proposal_path.to_str().unwrap(), "--vote", "yay", "--signer", @@ -1497,31 +1548,14 @@ fn proposal_offline() -> Result<()> { client.assert_success(); let expected_file_name = format!("proposal-vote-{}", albert); - let expected_path_vote = working_dir().join(&expected_file_name); + let expected_path_vote = test.test_dir.path().join(&expected_file_name); assert!(expected_path_vote.exists()); - let expected_path_proposal = working_dir().join("proposal"); - assert!(expected_path_proposal.exists()); - // 4. Compute offline tally - let proposal_data_folder = working_dir().join("proposal-test-data"); - fs::create_dir_all(&proposal_data_folder) - .expect("Should create a new folder."); - fs::copy( - expected_path_proposal, - &proposal_data_folder.join("proposal"), - ) - .expect("Should copy proposal file."); - fs::copy( - expected_path_vote, - &proposal_data_folder.join(&expected_file_name), - ) - .expect("Should copy proposal vote file."); - let tally_offline = vec![ "query-proposal-result", "--data-path", - proposal_data_folder.to_str().unwrap(), + test.test_dir.path().to_str().unwrap(), "--offline", "--ledger-address", &validator_one_rpc, @@ -1534,17 +1568,18 @@ fn proposal_offline() -> Result<()> { Ok(()) } -fn generate_proposal_json( - proposal_path: PathBuf, - proposal_content: serde_json::Value, +fn generate_proposal_json_file( + proposal_path: &std::path::Path, + proposal_content: &serde_json::Value, ) { - let intent_writer = OpenOptions::new() + let intent_writer = std::fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open(proposal_path) .unwrap(); - serde_json::to_writer(intent_writer, &proposal_content).unwrap(); + + serde_json::to_writer(intent_writer, proposal_content).unwrap(); } /// In this test we: @@ -1671,7 +1706,7 @@ fn test_genesis_validators() -> Result<()> { // 2. 
Initialize a new network with the 2 validators let mut genesis = genesis_config::open_genesis_config( working_dir.join(setup::SINGLE_NODE_NET_GENESIS), - ); + )?; let update_validator_config = |ix: u8, mut config: genesis_config::ValidatorConfig| { // Setup tokens balances and validity predicates @@ -1778,6 +1813,7 @@ fn test_genesis_validators() -> Result<()> { chain_id.as_str(), "--pre-genesis-path", pre_genesis_path.as_ref(), + "--dont-prefetch-wasm", ], Some(5) )?; @@ -1795,6 +1831,7 @@ fn test_genesis_validators() -> Result<()> { chain_id.as_str(), "--pre-genesis-path", pre_genesis_path.as_ref(), + "--dont-prefetch-wasm", ], Some(5) )?; @@ -1859,7 +1896,7 @@ fn test_genesis_validators() -> Result<()> { let mut non_validator = run_as!(test, Who::NonValidator, Bin::Node, args, Some(40))?; non_validator.exp_string("Anoma ledger node started")?; - non_validator.exp_string("This node is a fullnode")?; + non_validator.exp_string("This node is not a validator")?; non_validator.exp_string("Starting RPC HTTP server on")?; let bg_validator_0 = validator_0.background(); @@ -1905,3 +1942,144 @@ fn test_genesis_validators() -> Result<()> { Ok(()) } + +/// In this test we intentionally make a validator node double sign blocks +/// to test that slashing evidence is received and processed by the ledger +/// correctly: +/// 1. Run 2 genesis validator ledger nodes +/// 2. Copy the first genesis validator base-dir +/// 3. Increment its ports and generate new node ID to avoid conflict +/// 4. Run it to get it to double vote and sign blocks +/// 5. Submit a valid token transfer tx to validator 0 +/// 6. Wait for double signing evidence +#[test] +fn double_signing_gets_slashed() -> Result<()> { + use std::net::SocketAddr; + use std::str::FromStr; + + use namada::types::key::{self, ed25519, SigScheme}; + use namada_apps::client; + use namada_apps::config::Config; + + // Setup 2 genesis validator nodes + let test = + setup::network(|genesis| setup::add_validators(1, genesis), None)?; + + disable_eth_fullnode(&test, &test.net.chain_id, &Who::Validator(0)); + disable_eth_fullnode(&test, &test.net.chain_id, &Who::Validator(1)); + + // 1. Run 2 genesis validator ledger nodes + let args = ["ledger"]; + let mut validator_0 = + run_as!(test, Who::Validator(0), Bin::Node, args, Some(40))?; + validator_0.exp_string("Anoma ledger node started")?; + validator_0.exp_string("This node is a validator")?; + let _bg_validator_0 = validator_0.background(); + let mut validator_1 = + run_as!(test, Who::Validator(1), Bin::Node, args, Some(40))?; + validator_1.exp_string("Anoma ledger node started")?; + validator_1.exp_string("This node is a validator")?; + let bg_validator_1 = validator_1.background(); + + // 2. Copy the first genesis validator base-dir + let validator_0_base_dir = test.get_base_dir(&Who::Validator(0)); + let validator_0_base_dir_copy = + test.test_dir.path().join("validator-0-copy"); + fs_extra::dir::copy( + &validator_0_base_dir, + &validator_0_base_dir_copy, + &fs_extra::dir::CopyOptions { + copy_inside: true, + ..Default::default() + }, + ) + .unwrap(); + + // 3. 
Increment its ports and generate new node ID to avoid conflict + + // Same as in `genesis/e2e-tests-single-node.toml` for `validator-0` + let net_address_0 = SocketAddr::from_str("127.0.0.1:27656").unwrap(); + let net_address_port_0 = net_address_0.port(); + + let update_config = |ix: u8, mut config: Config| { + let first_port = net_address_port_0 + 6 * (ix as u16 + 1); + config.ledger.tendermint.p2p_address.set_port(first_port); + config + .ledger + .tendermint + .rpc_address + .set_port(first_port + 1); + config.ledger.shell.ledger_address.set_port(first_port + 2); + config + }; + + let validator_0_copy_config = update_config( + 2, + Config::load(&validator_0_base_dir_copy, &test.net.chain_id, None), + ); + validator_0_copy_config + .write(&validator_0_base_dir_copy, &test.net.chain_id, true) + .unwrap(); + + // Generate a new node key + use rand::prelude::ThreadRng; + use rand::thread_rng; + + let mut rng: ThreadRng = thread_rng(); + let node_sk = ed25519::SigScheme::generate(&mut rng); + let node_sk = key::common::SecretKey::Ed25519(node_sk); + let tm_home_dir = validator_0_base_dir_copy + .join(test.net.chain_id.as_str()) + .join("tendermint"); + let _node_pk = + client::utils::write_tendermint_node_key(&tm_home_dir, node_sk); + + // 4. Run it to get it to double vote and sign block + let loc = format!("{}:{}", std::file!(), std::line!()); + // This node will only connect to `validator_1`, so that nodes + // `validator_0` and `validator_0_copy` should start double signing + let mut validator_0_copy = setup::run_cmd( + Bin::Node, + args, + Some(40), + &test.working_dir, + validator_0_base_dir_copy, + "validator", + loc, + )?; + validator_0_copy.exp_string("Anoma ledger node started")?; + validator_0_copy.exp_string("This node is a validator")?; + let _bg_validator_0_copy = validator_0_copy.background(); + + // 5. Submit a valid token transfer tx to validator 0 + let validator_one_rpc = get_actor_rpc(&test, &Who::Validator(0)); + let tx_args = [ + "transfer", + "--source", + BERTHA, + "--target", + ALBERT, + "--token", + XAN, + "--amount", + "10.1", + "--fee-amount", + "0", + "--gas-limit", + "0", + "--fee-token", + XAN, + "--ledger-address", + &validator_one_rpc, + ]; + let mut client = run!(test, Bin::Client, tx_args, Some(40))?; + client.exp_string("Transaction is valid.")?; + client.assert_success(); + + // 6. 
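// Worked example of the port arithmetic above, assuming the single-node genesis
// `net_address` 127.0.0.1:27656 for `validator-0`:
//   copied node (update_config(2, ..)): p2p 27656 + 6 * 3 = 27674, rpc 27675, ledger 27676
//   validator-1 (from `add_validators`): base 27656 + 6 * 1 = 27662
// so the copy cannot clash with validator-0 (27656..) or validator-1 (27662..).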
Wait for double signing evidence + let mut validator_1 = bg_validator_1.foreground(); + validator_1.exp_string("Processing evidence")?; + validator_1.exp_string("Slashing")?; + + Ok(()) +} diff --git a/tests/src/e2e/setup.rs b/tests/src/e2e/setup.rs index 35c578f2f6..175a998987 100644 --- a/tests/src/e2e/setup.rs +++ b/tests/src/e2e/setup.rs @@ -1,3 +1,4 @@ +use std::collections::HashMap; use std::ffi::OsStr; use std::fmt::Display; use std::fs::{File, OpenOptions}; @@ -16,7 +17,7 @@ use expectrl::process::unix::{PtyStream, UnixProcess}; use expectrl::session::Session; use expectrl::stream::log::LoggedStream; use expectrl::{Eof, WaitStatus}; -use eyre::eyre; +use eyre::{eyre, Context}; use itertools::{Either, Itertools}; use namada::types::chain::ChainId; use namada_apps::client::utils; @@ -24,6 +25,7 @@ use namada_apps::config::genesis::genesis_config::{self, GenesisConfig}; use namada_apps::config::{ethereum_bridge, Config}; use namada_apps::{config, wallet}; use rand::Rng; +use serde_json; use tempfile::{tempdir, TempDir}; use crate::e2e::helpers::generate_bin_command; @@ -91,21 +93,12 @@ pub fn add_validators(num: u8, mut genesis: GenesisConfig) -> GenesisConfig { let validator_0 = genesis.validator.get_mut("validator-0").unwrap(); // Clone the first validator before modifying it let other_validators = validator_0.clone(); - // Set the first validator to be a bootstrap node to enable P2P connectivity - validator_0.intent_gossip_seed = Some(true); - // A bootstrap node doesn't participate in the gossipsub protocol for - // gossiping intents, so we remove its matchmaker - validator_0.matchmaker_account = None; - validator_0.matchmaker_code = None; - validator_0.matchmaker_tx = None; let net_address_0 = SocketAddr::from_str(validator_0.net_address.as_ref().unwrap()) .unwrap(); let net_address_port_0 = net_address_0.port(); for ix in 0..num { let mut validator = other_validators.clone(); - // Only the first validator is bootstrap - validator.intent_gossip_seed = None; let mut net_address = net_address_0; // 6 ports for each validator let first_port = net_address_port_0 + 6 * (ix as u16 + 1); @@ -138,7 +131,7 @@ pub fn network( // Open the source genesis file let genesis = genesis_config::open_genesis_config( working_dir.join(SINGLE_NODE_NET_GENESIS), - ); + )?; // Run the provided function on it let genesis = update_genesis(genesis); @@ -518,7 +511,6 @@ impl AnomaCmd { } /// Assert that the process exited with failure - #[allow(dead_code)] pub fn assert_failure(&mut self) { // Make sure that there is no unread output first let _ = self.exp_eof().unwrap(); @@ -683,9 +675,9 @@ where { // Root cargo workspace manifest path let (bin_name, log_level) = match bin { - Bin::Node => ("namadan", "info"), - Bin::Client => ("namadac", "tendermint_rpc=debug"), - Bin::Wallet => ("namadaw", "info"), + Bin::Node => ("namadan", "debug"), + Bin::Client => ("namadac", "debug"), + Bin::Wallet => ("namadaw", "debug"), }; let mut run_cmd = generate_bin_command( @@ -800,7 +792,6 @@ pub mod constants { pub const CHRISTEL: &str = "Christel"; pub const CHRISTEL_KEY: &str = "Christel-key"; pub const DAEWON: &str = "Daewon"; - pub const MATCHMAKER_KEY: &str = "matchmaker-key"; // Native VP aliases pub const GOVERNANCE_ADDRESS: &str = "governance"; @@ -883,11 +874,42 @@ pub fn copy_wasm_to_chain_dir<'a>( .join(chain_id.as_str()) .join(config::DEFAULT_WASM_DIR); for file in &wasm_files { - std::fs::copy( - working_dir.join("wasm").join(&file), - target_wasm_dir.join(&file), - ) - .unwrap(); + let src = 
working_dir.join("wasm").join(&file); + let dst = target_wasm_dir.join(&file); + std::fs::copy(&src, &dst) + .wrap_err_with(|| { + format!( + "copying {} to {}", + &src.to_string_lossy(), + &dst.to_string_lossy(), + ) + }) + .unwrap(); } } } + +pub fn get_all_wasms_hashes( + working_dir: &Path, + filter: Option<&str>, +) -> Vec { + let checksums_path = working_dir.join("wasm/checksums.json"); + let checksums_content = fs::read_to_string(checksums_path).unwrap(); + let checksums: HashMap = + serde_json::from_str(&checksums_content).unwrap(); + let filter_prefix = filter.unwrap_or_default(); + checksums + .values() + .filter_map(|wasm| { + if wasm.contains(&filter_prefix) { + Some( + wasm.split('.').collect::>()[1] + .to_owned() + .to_uppercase(), + ) + } else { + None + } + }) + .collect() +} diff --git a/tests/src/lib.rs b/tests/src/lib.rs index 1b75f83bdc..c993b4b72c 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -9,9 +9,10 @@ mod vm_host_env; pub use vm_host_env::{ibc, tx, vp}; #[cfg(test)] mod e2e; -#[cfg(test)] -mod native_vp; +pub mod native_vp; pub mod storage; +#[cfg(test)] +mod storage_api; /// Using this import requires `tracing` and `tracing-subscriber` dependencies. /// Set env var `RUST_LOG=info` to see the logs from a test run (and diff --git a/tests/src/native_vp/mod.rs b/tests/src/native_vp/mod.rs index 5540ac26d4..8711f86d1d 100644 --- a/tests/src/native_vp/mod.rs +++ b/tests/src/native_vp/mod.rs @@ -1,47 +1,38 @@ -mod pos; +pub mod pos; + +use std::collections::BTreeSet; use namada::ledger::native_vp::{Ctx, NativeVp}; use namada::ledger::storage::mockdb::MockDB; use namada::ledger::storage::traits::Sha256Hasher; -use namada::vm::wasm::compilation_cache; -use namada::vm::wasm::compilation_cache::common::Cache; -use namada::vm::{wasm, WasmCacheRwAccess}; -use tempfile::TempDir; +use namada::types::address::Address; +use namada::types::storage; +use namada::vm::WasmCacheRwAccess; use crate::tx::TestTxEnv; type NativeVpCtx<'a> = Ctx<'a, MockDB, Sha256Hasher, WasmCacheRwAccess>; -type VpCache = Cache; #[derive(Debug)] pub struct TestNativeVpEnv { - pub vp_cache_dir: TempDir, - pub vp_wasm_cache: VpCache, pub tx_env: TestTxEnv, + pub address: Address, + pub verifiers: BTreeSet
, + pub keys_changed: BTreeSet, } impl TestNativeVpEnv { - pub fn new(tx_env: TestTxEnv) -> Self { - let (vp_wasm_cache, vp_cache_dir) = - wasm::compilation_cache::common::testing::cache(); - - Self { - vp_cache_dir, - vp_wasm_cache, - tx_env, - } - } -} + pub fn from_tx_env(tx_env: TestTxEnv, address: Address) -> Self { + // Find the tx verifiers and keys_changes the same way as protocol would + let verifiers = tx_env.get_verifiers(); -impl Default for TestNativeVpEnv { - fn default() -> Self { - let (vp_wasm_cache, vp_cache_dir) = - wasm::compilation_cache::common::testing::cache(); + let keys_changed = tx_env.all_touched_storage_keys(); Self { - vp_cache_dir, - vp_wasm_cache, - tx_env: TestTxEnv::default(), + address, + tx_env, + verifiers, + keys_changed, } } } @@ -51,20 +42,10 @@ impl TestNativeVpEnv { pub fn validate_tx<'a, T>( &'a self, init_native_vp: impl Fn(NativeVpCtx<'a>) -> T, - // The function is applied on the `tx_data` when called - mut apply_tx: impl FnMut(&[u8]), ) -> Result::Error> where T: NativeVp, { - let tx_data = self.tx_env.tx.data.as_ref().cloned().unwrap_or_default(); - apply_tx(&tx_data); - - // Find the tx verifiers and keys_changes the same way as protocol would - let verifiers = self.tx_env.get_verifiers(); - - let keys_changed = self.tx_env.all_touched_storage_keys(); - let ctx = Ctx { iterators: Default::default(), gas_meter: Default::default(), @@ -72,10 +53,13 @@ impl TestNativeVpEnv { write_log: &self.tx_env.write_log, tx: &self.tx_env.tx, vp_wasm_cache: self.tx_env.vp_wasm_cache.clone(), + address: &self.address, + keys_changed: &self.keys_changed, + verifiers: &self.verifiers, }; let tx_data = self.tx_env.tx.data.as_ref().cloned().unwrap_or_default(); let native_vp = init_native_vp(ctx); - native_vp.validate_tx(&tx_data, &keys_changed, &verifiers) + native_vp.validate_tx(&tx_data, &self.keys_changed, &self.verifiers) } } diff --git a/tests/src/native_vp/pos.rs b/tests/src/native_vp/pos.rs index e7c7daf7c1..ec6f75a15f 100644 --- a/tests/src/native_vp/pos.rs +++ b/tests/src/native_vp/pos.rs @@ -23,7 +23,7 @@ //! ## Pos Parameters //! //! Arbitrary valid PoS parameters are provided from its module via -//! [`namada_vm_env::proof_of_stake::parameters::testing::arb_pos_params`]. +//! [`namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params`]. //! //! ## Valid transitions //! @@ -34,7 +34,7 @@ //! the modifications of its predecessor transition). //! //! The PoS storage modifications are modelled using -//! [`testing::PosStorageChange`]. +//! `testing::PosStorageChange`. //! //! - Bond: Requires a validator account in the state (the `#{validator}` //! segments in the keys below). Some of the storage change are optional, @@ -50,7 +50,7 @@ //! - Unbond: Requires a bond in the state (the `#{owner}` and `#{validator}` //! segments in the keys below must be the owner and a validator of an //! existing bond). The bond's total amount must be greater or equal to the -//! amount that' being unbonded. Some of the storage changes are optional, +//! amount that is being unbonded. Some of the storage changes are optional, //! which depends on whether the unbonding decreases voting power of the //! validator. //! - `#{PoS}/bond/#{owner}/#{validator}` @@ -99,16 +99,56 @@ //! - add arb invalid storage changes //! 
- add slashes +use namada::ledger::pos::namada_proof_of_stake::PosBase; +use namada::types::storage::Epoch; +use namada_tx_prelude::proof_of_stake::{ + staking_token_address, GenesisValidator, PosParams, +}; + +use crate::tx::tx_host_env; + +/// initialize proof-of-stake genesis with the given list of validators and +/// parameters. +pub fn init_pos( + genesis_validators: &[GenesisValidator], + params: &PosParams, + start_epoch: Epoch, +) { + tx_host_env::init(); + + tx_host_env::with(|tx_env| { + // Ensure that all the used + // addresses exist + tx_env.spawn_accounts([&staking_token_address()]); + for validator in genesis_validators { + tx_env.spawn_accounts([ + &validator.address, + &validator.staking_reward_address, + ]); + } + tx_env.storage.block.epoch = start_epoch; + // Initialize PoS storage + tx_env + .storage + .init_genesis( + params, + genesis_validators.iter(), + u64::from(start_epoch), + ) + .unwrap(); + }); +} + #[cfg(test)] mod tests { - use namada::ledger::pos::namada_proof_of_stake::PosBase; use namada::ledger::pos::PosParams; + use namada::types::key::common::PublicKey; use namada::types::storage::Epoch; - use namada::types::token; - use namada_vm_env::proof_of_stake::parameters::testing::arb_pos_params; - use namada_vm_env::proof_of_stake::{staking_token_address, PosVP}; - use namada_vm_env::tx_prelude::Address; + use namada::types::{address, token}; + use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params; + use namada_tx_prelude::proof_of_stake::PosVP; + use namada_tx_prelude::Address; use proptest::prelude::*; use proptest::prop_state_machine; use proptest::state_machine::{AbstractStateMachine, StateMachineTest}; @@ -119,8 +159,9 @@ mod tests { arb_invalid_pos_action, arb_valid_pos_action, InvalidPosAction, ValidPosAction, }; + use super::*; use crate::native_vp::TestNativeVpEnv; - use crate::tx::{tx_host_env, TestTxEnv}; + use crate::tx::tx_host_env; prop_state_machine! 
{ #![proptest_config(Config { @@ -163,6 +204,7 @@ mod tests { } /// State machine transitions + #[allow(clippy::large_enum_variant)] #[derive(Clone, Debug)] enum Transition { /// Commit all the tx changes already applied in the tx env @@ -189,28 +231,8 @@ mod tests { ) -> Self::ConcreteState { println!(); println!("New test case"); - // Initialize the transaction env - let mut tx_env = TestTxEnv::default(); - - // Set the epoch - let storage = &mut tx_env.storage; - storage.block.epoch = initial_state.epoch; - - // Initialize PoS storage - storage - .init_genesis( - &initial_state.params, - [].into_iter(), - initial_state.epoch, - ) - .unwrap(); - - // Make sure that the staking token account exist - tx_env.spawn_accounts([staking_token_address()]); - - // Use the `tx_env` for host env calls - tx_host_env::set(tx_env); + init_pos(&[], &initial_state.params, initial_state.epoch); // The "genesis" block state for change in initial_state.committed_valid_actions { @@ -379,8 +401,12 @@ mod tests { Transition::CommitTx => true, Transition::NextEpoch => true, Transition::Valid(action) => match action { - ValidPosAction::InitValidator(address) => { + ValidPosAction::InitValidator { + address, + consensus_key, + } => { !state.is_validator(address) + && !state.is_used_key(consensus_key) } ValidPosAction::Bond { amount: _, @@ -410,11 +436,13 @@ mod tests { fn validate_transitions(&self) { // Use the tx_env to run PoS VP let tx_env = tx_host_env::take(); - let vp_env = TestNativeVpEnv::new(tx_env); - let result: Result = - vp_env.validate_tx(PosVP::new, |_tx_data| {}); + + let vp_env = TestNativeVpEnv::from_tx_env(tx_env, address::POS); + let result = vp_env.validate_tx(PosVP::new); + // Put the tx_env back before checking the result tx_host_env::set(vp_env.tx_env); + let result = result.expect("Validation of valid changes must not fail!"); @@ -458,7 +486,19 @@ mod tests { /// Find if the given address is a validator fn is_validator(&self, addr: &Address) -> bool { self.all_valid_actions().iter().any(|action| match action { - ValidPosAction::InitValidator(validator) => validator == addr, + ValidPosAction::InitValidator { address, .. } => { + address == addr + } + _ => false, + }) + } + + /// Find if the given consensus key is already used by any validators + fn is_used_key(&self, given_consensus_key: &PublicKey) -> bool { + self.all_valid_actions().iter().any(|action| match action { + ValidPosAction::InitValidator { consensus_key, .. 
} => { + consensus_key == given_consensus_key + } _ => false, }) } @@ -539,20 +579,20 @@ pub mod testing { use namada::types::key::RefTo; use namada::types::storage::Epoch; use namada::types::{address, key, token}; - use namada_vm_env::proof_of_stake::epoched::{ + use namada_tx_prelude::proof_of_stake::epoched::{ DynEpochOffset, Epoched, EpochedDelta, }; - use namada_vm_env::proof_of_stake::types::{ + use namada_tx_prelude::proof_of_stake::types::{ Bond, Unbond, ValidatorState, VotingPower, VotingPowerDelta, WeightedValidator, }; - use namada_vm_env::proof_of_stake::{ + use namada_tx_prelude::proof_of_stake::{ staking_token_address, BondId, Bonds, PosParams, Unbonds, }; - use namada_vm_env::tx_prelude::{Address, PoS}; + use namada_tx_prelude::{Address, StorageRead, StorageWrite}; use proptest::prelude::*; - use crate::tx::tx_host_env; + use crate::tx::{self, tx_host_env}; #[derive(Clone, Debug, Default)] pub struct TestValidator { @@ -569,7 +609,10 @@ pub mod testing { #[derive(Clone, Debug)] pub enum ValidPosAction { - InitValidator(Address), + InitValidator { + address: Address, + consensus_key: PublicKey, + }, Bond { amount: token::Amount, owner: Address, @@ -597,11 +640,14 @@ pub mod testing { #[derivative(Debug)] pub enum PosStorageChange { - /// Ensure that the account exists when initialing a valid new + /// Ensure that the account exists when initializing a valid new /// validator or delegation from a new owner SpawnAccount { address: Address, }, + /// Add tokens included in a new bond at given offset. Bonded tokens + /// are added at pipeline offset and unbonded tokens are added as + /// negative values at unbonding offset. Bond { owner: Address, validator: Address, @@ -656,6 +702,8 @@ pub mod testing { }, ValidatorAddressRawHash { address: Address, + #[derivative(Debug = "ignore")] + consensus_key: PublicKey, }, } @@ -665,13 +713,21 @@ pub mod testing { let validators: Vec
= valid_actions .iter() .filter_map(|action| match action { - ValidPosAction::InitValidator(addr) => Some(addr.clone()), + ValidPosAction::InitValidator { address, .. } => { + Some(address.clone()) + } _ => None, }) .collect(); - let init_validator = address::testing::arb_established_address() - .prop_map(|addr| { - ValidPosAction::InitValidator(Address::Established(addr)) + let init_validator = ( + address::testing::arb_established_address(), + key::testing::arb_common_keypair(), + ) + .prop_map(|(addr, consensus_key)| { + ValidPosAction::InitValidator { + address: Address::Established(addr), + consensus_key: consensus_key.ref_to(), + } }); if validators.is_empty() { @@ -784,8 +840,8 @@ pub mod testing { /// the VP. pub fn apply(self, is_current_tx_valid: bool) { // Read the PoS parameters - use namada_vm_env::tx_prelude::PosRead; - let params = PoS.read_pos_params(); + use namada_tx_prelude::PosRead; + let params = tx::ctx().read_pos_params().unwrap(); let current_epoch = tx_host_env::with(|env| { // Reset the gas meter on each change, so that we never run @@ -812,46 +868,50 @@ pub mod testing { params: &PosParams, current_epoch: Epoch, ) -> PosStorageChanges { - use namada_vm_env::tx_prelude::PosRead; + use namada_tx_prelude::PosRead; match self { - ValidPosAction::InitValidator(addr) => { + ValidPosAction::InitValidator { + address, + consensus_key, + } => { let offset = DynEpochOffset::PipelineLen; vec![ PosStorageChange::SpawnAccount { - address: addr.clone(), + address: address.clone(), }, PosStorageChange::ValidatorAddressRawHash { - address: addr.clone(), + address: address.clone(), + consensus_key: consensus_key.clone(), }, PosStorageChange::ValidatorSet { - validator: addr.clone(), + validator: address.clone(), token_delta: 0, offset, }, PosStorageChange::ValidatorConsensusKey { - validator: addr.clone(), - pk: key::testing::keypair_1().ref_to(), + validator: address.clone(), + pk: consensus_key, }, PosStorageChange::ValidatorStakingRewardsAddress { - validator: addr.clone(), + validator: address.clone(), address: address::testing::established_address_1(), }, PosStorageChange::ValidatorState { - validator: addr.clone(), + validator: address.clone(), state: ValidatorState::Pending, }, PosStorageChange::ValidatorState { - validator: addr.clone(), + validator: address.clone(), state: ValidatorState::Candidate, }, PosStorageChange::ValidatorTotalDeltas { - validator: addr.clone(), + validator: address.clone(), delta: 0, offset, }, PosStorageChange::ValidatorVotingPower { - validator: addr, + validator: address, vp_delta: 0, offset: Either::Left(offset), }, @@ -870,8 +930,10 @@ pub mod testing { // Read the validator's current total deltas (this may be // updated by previous transition(s) within the same // transaction via write log) - let validator_total_deltas = - PoS.read_validator_total_deltas(&validator).unwrap(); + let validator_total_deltas = tx::ctx() + .read_validator_total_deltas(&validator) + .unwrap() + .unwrap(); let total_delta = validator_total_deltas .get_at_offset(current_epoch, offset, params) .unwrap_or_default(); @@ -1008,8 +1070,10 @@ pub mod testing { // Read the validator's current total deltas (this may be // updated by previous transition(s) within the same // transaction via write log) - let validator_total_deltas_cur = - PoS.read_validator_total_deltas(&validator).unwrap(); + let validator_total_deltas_cur = tx::ctx() + .read_validator_total_deltas(&validator) + .unwrap() + .unwrap(); let total_delta_cur = validator_total_deltas_cur 
.get_at_offset(current_epoch, offset, params) .unwrap_or_default(); @@ -1074,10 +1138,12 @@ pub mod testing { changes } ValidPosAction::Withdraw { owner, validator } => { - let unbonds = PoS.read_unbond(&BondId { - source: owner.clone(), - validator: validator.clone(), - }); + let unbonds = tx::ctx() + .read_unbond(&BondId { + source: owner.clone(), + validator: validator.clone(), + }) + .unwrap(); let token_delta: i128 = unbonds .and_then(|unbonds| unbonds.get(current_epoch)) @@ -1109,7 +1175,7 @@ pub mod testing { // invalid changes is_current_tx_valid: bool, ) { - use namada_vm_env::tx_prelude::{PosRead, PosWrite}; + use namada_tx_prelude::{PosRead, PosWrite}; match change { PosStorageChange::SpawnAccount { address } => { @@ -1127,14 +1193,15 @@ pub mod testing { source: owner, validator, }; - let bonds = PoS.read_bond(&bond_id); + let bonds = tx::ctx().read_bond(&bond_id).unwrap(); let bonds = if delta >= 0 { let amount: u64 = delta.try_into().unwrap(); let amount: token::Amount = amount.into(); let mut value = Bond { - deltas: HashMap::default(), + pos_deltas: HashMap::default(), + neg_deltas: Default::default(), }; - value.deltas.insert( + value.pos_deltas.insert( (current_epoch + offset.value(params)).into(), amount, ); @@ -1159,39 +1226,32 @@ pub mod testing { ); bonds } - None => Bonds::init(value, current_epoch, params), + None => Bonds::init_at_offset( + value, + current_epoch, + offset, + params, + ), } } else { let mut bonds = bonds.unwrap_or_else(|| { Bonds::init(Default::default(), current_epoch, params) }); let to_unbond: u64 = (-delta).try_into().unwrap(); - let mut to_unbond: token::Amount = to_unbond.into(); - let to_unbond = &mut to_unbond; - bonds.rev_update_while( - |bonds, _epoch| { - bonds.deltas.retain(|_epoch_start, bond_delta| { - if *to_unbond == 0.into() { - return true; - } - if to_unbond > bond_delta { - *to_unbond -= *bond_delta; - *bond_delta = 0.into(); - } else { - *bond_delta -= *to_unbond; - *to_unbond = 0.into(); - } - // Remove bonds with no tokens left - *bond_delta != 0.into() - }); - *to_unbond != 0.into() + let to_unbond: token::Amount = to_unbond.into(); + + bonds.add_at_offset( + Bond { + pos_deltas: Default::default(), + neg_deltas: to_unbond, }, current_epoch, + offset, params, ); bonds }; - PoS.write_bond(&bond_id, bonds); + tx::ctx().write_bond(&bond_id, bonds).unwrap(); } PosStorageChange::Unbond { owner, @@ -1203,8 +1263,8 @@ pub mod testing { source: owner, validator, }; - let bonds = PoS.read_bond(&bond_id).unwrap(); - let unbonds = PoS.read_unbond(&bond_id); + let bonds = tx::ctx().read_bond(&bond_id).unwrap().unwrap(); + let unbonds = tx::ctx().read_unbond(&bond_id).unwrap(); let amount: u64 = delta.try_into().unwrap(); let mut to_unbond: token::Amount = amount.into(); let mut value = Unbond { @@ -1218,7 +1278,7 @@ pub mod testing { && bond_epoch >= bonds.last_update().into() { if let Some(bond) = bonds.get_delta_at_epoch(bond_epoch) { - for (start_epoch, delta) in &bond.deltas { + for (start_epoch, delta) in &bond.pos_deltas { if delta >= &to_unbond { value.deltas.insert( ( @@ -1261,10 +1321,11 @@ pub mod testing { } None => Unbonds::init(value, current_epoch, params), }; - PoS.write_unbond(&bond_id, unbonds); + tx::ctx().write_unbond(&bond_id, unbonds).unwrap(); } PosStorageChange::TotalVotingPower { vp_delta, offset } => { - let mut total_voting_powers = PoS.read_total_voting_power(); + let mut total_voting_powers = + tx::ctx().read_total_voting_power().unwrap(); let vp_delta: i64 = vp_delta.try_into().unwrap(); match offset { 
Either::Left(offset) => { @@ -1284,10 +1345,17 @@ pub mod testing { ); } } - PoS.write_total_voting_power(total_voting_powers) + tx::ctx() + .write_total_voting_power(total_voting_powers) + .unwrap() } - PosStorageChange::ValidatorAddressRawHash { address } => { - PoS.write_validator_address_raw_hash(&address); + PosStorageChange::ValidatorAddressRawHash { + address, + consensus_key, + } => { + tx::ctx() + .write_validator_address_raw_hash(&address, &consensus_key) + .unwrap(); } PosStorageChange::ValidatorSet { validator, @@ -1303,8 +1371,9 @@ pub mod testing { ); } PosStorageChange::ValidatorConsensusKey { validator, pk } => { - let consensus_key = PoS + let consensus_key = tx::ctx() .read_validator_consensus_key(&validator) + .unwrap() .map(|mut consensus_keys| { consensus_keys.set(pk.clone(), current_epoch, params); consensus_keys @@ -1312,21 +1381,26 @@ pub mod testing { .unwrap_or_else(|| { Epoched::init(pk, current_epoch, params) }); - PoS.write_validator_consensus_key(&validator, consensus_key); + tx::ctx() + .write_validator_consensus_key(&validator, consensus_key) + .unwrap(); } PosStorageChange::ValidatorStakingRewardsAddress { validator, address, } => { - PoS.write_validator_staking_reward_address(&validator, address); + tx::ctx() + .write_validator_staking_reward_address(&validator, address) + .unwrap(); } PosStorageChange::ValidatorTotalDeltas { validator, delta, offset, } => { - let total_deltas = PoS + let total_deltas = tx::ctx() .read_validator_total_deltas(&validator) + .unwrap() .map(|mut total_deltas| { total_deltas.add_at_offset( delta, @@ -1344,15 +1418,18 @@ pub mod testing { params, ) }); - PoS.write_validator_total_deltas(&validator, total_deltas); + tx::ctx() + .write_validator_total_deltas(&validator, total_deltas) + .unwrap(); } PosStorageChange::ValidatorVotingPower { validator, vp_delta: delta, offset, } => { - let voting_power = PoS + let voting_power = tx::ctx() .read_validator_voting_power(&validator) + .unwrap() .map(|mut voting_powers| { match offset { Either::Left(offset) => { @@ -1382,11 +1459,14 @@ pub mod testing { params, ) }); - PoS.write_validator_voting_power(&validator, voting_power); + tx::ctx() + .write_validator_voting_power(&validator, voting_power) + .unwrap(); } PosStorageChange::ValidatorState { validator, state } => { - let state = PoS + let state = tx::ctx() .read_validator_state(&validator) + .unwrap() .map(|mut states| { states.set(state, current_epoch, params); states @@ -1394,16 +1474,15 @@ pub mod testing { .unwrap_or_else(|| { Epoched::init_at_genesis(state, current_epoch) }); - PoS.write_validator_state(&validator, state); + tx::ctx().write_validator_state(&validator, state).unwrap(); } PosStorageChange::StakingTokenPosBalance { delta } => { let balance_key = token::balance_key( &staking_token_address(), - &::POS_ADDRESS, - ) - .to_string(); + &::POS_ADDRESS, + ); let mut balance: token::Amount = - tx_host_env::read(&balance_key).unwrap_or_default(); + tx::ctx().read(&balance_key).unwrap().unwrap_or_default(); if delta < 0 { let to_spend: u64 = (-delta).try_into().unwrap(); let to_spend: token::Amount = to_spend.into(); @@ -1413,16 +1492,17 @@ pub mod testing { let to_recv: token::Amount = to_recv.into(); balance.receive(&to_recv); } - tx_host_env::write(&balance_key, balance); + tx::ctx().write(&balance_key, balance).unwrap(); } PosStorageChange::WithdrawUnbond { owner, validator } => { let bond_id = BondId { source: owner, validator, }; - let mut unbonds = PoS.read_unbond(&bond_id).unwrap(); + let mut unbonds = + 
tx::ctx().read_unbond(&bond_id).unwrap().unwrap(); unbonds.delete_current(current_epoch, params); - PoS.write_unbond(&bond_id, unbonds); + tx::ctx().write_unbond(&bond_id, unbonds).unwrap(); } } } @@ -1434,12 +1514,12 @@ pub mod testing { current_epoch: Epoch, params: &PosParams, ) { - use namada_vm_env::tx_prelude::{PosRead, PosWrite}; + use namada_tx_prelude::{PosRead, PosWrite}; let validator_total_deltas = - PoS.read_validator_total_deltas(&validator); + tx::ctx().read_validator_total_deltas(&validator).unwrap(); // println!("Read validator set"); - let mut validator_set = PoS.read_validator_set(); + let mut validator_set = tx::ctx().read_validator_set().unwrap(); // println!("Read validator set: {:#?}", validator_set); validator_set.update_from_offset( |validator_set, epoch| { @@ -1546,7 +1626,7 @@ pub mod testing { params, ); // println!("Write validator set {:#?}", validator_set); - PoS.write_validator_set(validator_set); + tx::ctx().write_validator_set(validator_set).unwrap(); } pub fn arb_invalid_pos_action( @@ -1571,7 +1651,9 @@ pub mod testing { let validators: Vec
= valid_actions .iter() .filter_map(|action| match action { - ValidPosAction::InitValidator(addr) => Some(addr.clone()), + ValidPosAction::InitValidator { address, .. } => { + Some(address.clone()) + } _ => None, }) .collect(); @@ -1593,7 +1675,7 @@ pub mod testing { // any u64 but `0` let arb_delta = - prop_oneof![(-(u64::MAX as i128)..0), (1..=u64::MAX as i128),]; + prop_oneof![(-(u32::MAX as i128)..0), (1..=u32::MAX as i128),]; prop_oneof![ ( @@ -1626,8 +1708,8 @@ pub mod testing { /// Apply an invalid PoS storage action. pub fn apply(self) { // Read the PoS parameters - use namada_vm_env::tx_prelude::PosRead; - let params = PoS.read_pos_params(); + use namada_tx_prelude::PosRead; + let params = tx::ctx().read_pos_params().unwrap(); for (epoch, changes) in self.changes { for change in changes { @@ -1642,9 +1724,9 @@ pub mod testing { params: &PosParams, current_epoch: Epoch, ) -> bool { - use namada_vm_env::tx_prelude::PosRead; + use namada_tx_prelude::PosRead; - let validator_sets = PoS.read_validator_set(); + let validator_sets = tx::ctx().read_validator_set().unwrap(); let validator_set = validator_sets .get_at_offset(current_epoch, DynEpochOffset::PipelineLen, params) .unwrap(); diff --git a/tests/src/storage_api/collections/lazy_map.rs b/tests/src/storage_api/collections/lazy_map.rs new file mode 100644 index 0000000000..afff09bbf1 --- /dev/null +++ b/tests/src/storage_api/collections/lazy_map.rs @@ -0,0 +1,613 @@ +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + use std::convert::TryInto; + + use borsh::{BorshDeserialize, BorshSerialize}; + use namada::types::address::{self, Address}; + use namada::types::storage; + use namada_tx_prelude::storage::KeySeg; + use namada_tx_prelude::storage_api::collections::{ + lazy_map, LazyCollection, LazyMap, + }; + use proptest::prelude::*; + use proptest::prop_state_machine; + use proptest::state_machine::{AbstractStateMachine, StateMachineTest}; + use proptest::test_runner::Config; + use test_log::test; + + use crate::tx::tx_host_env; + use crate::vp::vp_host_env; + + prop_state_machine! { + #![proptest_config(Config { + // Instead of the default 256, we only run 5 because otherwise it + // takes too long and it's preferable to crank up the number of + // transitions instead, to allow each case to run for more epochs as + // some issues only manifest once the model progresses further. + // Additionally, more cases will be explored every time this test is + // executed in the CI. + cases: 5, + .. Config::default() + })] + #[test] + fn lazy_map_api_state_machine_test(sequential 1..100 => ConcreteLazyMapState); + } + + /// Type of key used in the map + type TestKey = u64; + + /// Some borsh-serializable type with arbitrary fields to be used inside + /// LazyMap state machine test + #[derive( + Clone, + Debug, + BorshSerialize, + BorshDeserialize, + PartialEq, + Eq, + PartialOrd, + Ord, + )] + struct TestVal { + x: u64, + y: bool, + } + + /// A `StateMachineTest` implemented on this struct manipulates it with + /// `Transition`s, which are also being accumulated into + /// `current_transitions`. 
It then: + /// + /// - checks its state against an in-memory `std::collections::HashMap` + /// - runs validation and checks that the `LazyMap::Action`s reported from + /// validation match with transitions that were applied + /// + /// Additionally, one of the transitions is to commit a block and/or + /// transaction, during which the currently accumulated state changes are + /// persisted, or promoted from transaction write log to block's write log. + #[derive(Debug)] + struct ConcreteLazyMapState { + /// Address is used to prefix the storage key of the `lazy_map` in + /// order to simulate a transaction and a validity predicate + /// check from changes on the `lazy_map` + address: Address, + /// In the test, we apply the same transitions on the `lazy_map` as on + /// `eager_map` to check that `lazy_map`'s state is consistent with + /// `eager_map`. + eager_map: BTreeMap, + /// Handle to a lazy map + lazy_map: LazyMap, + /// Valid LazyMap changes in the current transaction + current_transitions: Vec, + } + + #[derive(Clone, Debug, Default)] + struct AbstractLazyMapState { + /// Valid LazyMap changes in the current transaction + valid_transitions: Vec, + /// Valid LazyMap changes committed to storage + committed_transitions: Vec, + } + + /// Possible transitions that can modify a [`LazyMap`]. + /// This roughly corresponds to the methods that have `StorageWrite` + /// access and is very similar to [`Action`] + #[derive(Clone, Debug)] + enum Transition { + /// Commit all valid transitions in the current transaction + CommitTx, + /// Commit all valid transitions in the current transaction and also + /// commit the current block + CommitTxAndBlock, + /// Insert a key-val into a [`LazyMap`] + Insert(TestKey, TestVal), + /// Remove a key-val from a [`LazyMap`] + Remove(TestKey), + /// Update a value at key from pre to post state in a + /// [`LazyMap`] + Update(TestKey, TestVal), + } + + impl AbstractStateMachine for AbstractLazyMapState { + type State = Self; + type Transition = Transition; + + fn init_state() -> BoxedStrategy { + Just(Self::default()).boxed() + } + + // Apply a random transition to the state + fn transitions(state: &Self::State) -> BoxedStrategy { + let length = state.len(); + if length == 0 { + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => (arb_map_key(), arb_map_val()).prop_map(|(key, val)| Transition::Insert(key, val)) + ] + .boxed() + } else { + let keys = state.find_existing_keys(); + let arb_existing_map_key = + || proptest::sample::select(keys.clone()); + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => (arb_existing_map_key(), arb_map_val()).prop_map(|(key, val)| + Transition::Update(key, val) + ), + 3 => arb_existing_map_key().prop_map(Transition::Remove), + 5 => (arb_map_key().prop_filter("insert on non-existing keys only", + move |key| !keys.contains(key)), arb_map_val()) + .prop_map(|(key, val)| Transition::Insert(key, val)) + ] + .boxed() + } + } + + fn apply_abstract( + mut state: Self::State, + transition: &Self::Transition, + ) -> Self::State { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + let valid_actions_to_commit = + std::mem::take(&mut state.valid_transitions); + state + .committed_transitions + .extend(valid_actions_to_commit.into_iter()); + } + _ => state.valid_transitions.push(transition.clone()), + } + state + } + + fn preconditions( + state: &Self::State, + transition: &Self::Transition, + ) -> bool { + let length = 
state.len(); + // Ensure that the remove or update transitions are not applied + // to an empty state + if length == 0 + && matches!( + transition, + Transition::Remove(_) | Transition::Update(_, _) + ) + { + return false; + } + match transition { + Transition::Update(key, _) | Transition::Remove(key) => { + let keys = state.find_existing_keys(); + // Ensure that the update/remove key is an existing one + keys.contains(key) + } + Transition::Insert(key, _) => { + let keys = state.find_existing_keys(); + // Ensure that the insert key is not an existing one + !keys.contains(key) + } + _ => true, + } + } + } + + impl StateMachineTest for ConcreteLazyMapState { + type Abstract = AbstractLazyMapState; + type ConcreteState = Self; + + fn init_test( + _initial_state: ::State, + ) -> Self::ConcreteState { + // Init transaction env in which we'll be applying the transitions + tx_host_env::init(); + + // The lazy_map's path must be prefixed by the address to be able + // to trigger a validity predicate on it + let address = address::testing::established_address_1(); + tx_host_env::with(|env| env.spawn_accounts([&address])); + let lazy_map_prefix: storage::Key = address.to_db_key().into(); + + Self { + address, + eager_map: BTreeMap::new(), + lazy_map: LazyMap::open( + lazy_map_prefix.push(&"arbitrary".to_string()).unwrap(), + ), + current_transitions: vec![], + } + } + + fn apply_concrete( + mut state: Self::ConcreteState, + transition: ::Transition, + ) -> Self::ConcreteState { + // Apply transitions in transaction env + let ctx = tx_host_env::ctx(); + + // Persist the transitions in the current tx, or clear previous ones + // if we're committing a tx + match &transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + state.current_transitions = vec![]; + } + _ => { + state.current_transitions.push(transition.clone()); + } + } + + // Transition application on lazy map and post-conditions: + match &transition { + Transition::CommitTx => { + // commit the tx without committing the block + tx_host_env::with(|env| env.write_log.commit_tx()); + } + Transition::CommitTxAndBlock => { + // commit the tx and the block + tx_host_env::commit_tx_and_block(); + } + Transition::Insert(key, value) => { + state.lazy_map.insert(ctx, *key, value.clone()).unwrap(); + + // Post-conditions: + let stored_value = + state.lazy_map.get(ctx, key).unwrap().unwrap(); + assert_eq!( + &stored_value, value, + "the new item must be added to the back" + ); + + state.assert_validation_accepted(); + } + Transition::Remove(key) => { + let removed = + state.lazy_map.remove(ctx, key).unwrap().unwrap(); + + // Post-conditions: + assert_eq!( + &removed, + state.eager_map.get(key).unwrap(), + "removed element matches the value in eager map \ + before it's updated" + ); + + state.assert_validation_accepted(); + } + Transition::Update(key, value) => { + let old_val = + state.lazy_map.get(ctx, key).unwrap().unwrap(); + + state.lazy_map.insert(ctx, *key, value.clone()).unwrap(); + + // Post-conditions: + let new_val = + state.lazy_map.get(ctx, key).unwrap().unwrap(); + assert_eq!( + &old_val, + state.eager_map.get(key).unwrap(), + "old value must match the value at the same key in \ + the eager map before it's updated" + ); + assert_eq!( + &new_val, value, + "new value must match that which was passed into the \ + Transition::Update" + ); + + state.assert_validation_accepted(); + } + } + + // Apply transition in the eager map for comparison + apply_transition_on_eager_map(&mut state.eager_map, &transition); + + // Global 
post-conditions: + + // All items in eager map must be present in lazy map + for (key, expected_item) in state.eager_map.iter() { + let got = + state.lazy_map.get(ctx, key).unwrap().expect( + "The expected item must be present in lazy map", + ); + assert_eq!(expected_item, &got, "at key {key}"); + } + + // All items in lazy map must be present in eager map + for key_val in state.lazy_map.iter(ctx).unwrap() { + let (key, expected_val) = key_val.unwrap(); + let got = state + .eager_map + .get(&key) + .expect("The expected item must be present in eager map"); + assert_eq!(&expected_val, got, "at key {key}"); + } + + state + } + } + + impl AbstractLazyMapState { + /// Find the length of the map from the applied transitions + fn len(&self) -> u64 { + (map_len_diff_from_transitions(self.committed_transitions.iter()) + + map_len_diff_from_transitions(self.valid_transitions.iter())) + .try_into() + .expect( + "It shouldn't be possible to underflow length from all \ + transactions applied in abstract state", + ) + } + + /// Build an eager map from the committed and current transitions + fn eager_map(&self) -> BTreeMap { + let mut eager_map = BTreeMap::new(); + for transition in &self.committed_transitions { + apply_transition_on_eager_map(&mut eager_map, transition); + } + for transition in &self.valid_transitions { + apply_transition_on_eager_map(&mut eager_map, transition); + } + eager_map + } + + /// Find the keys currently present in the map + fn find_existing_keys(&self) -> Vec { + self.eager_map().keys().cloned().collect() + } + } + + /// Find the difference in length of the map from the applied transitions + fn map_len_diff_from_transitions<'a>( + transitions: impl Iterator, + ) -> i64 { + let mut insert_count: i64 = 0; + let mut remove_count: i64 = 0; + + for trans in transitions { + match trans { + Transition::CommitTx + | Transition::CommitTxAndBlock + | Transition::Update(_, _) => {} + Transition::Insert(_, _) => insert_count += 1, + Transition::Remove(_) => remove_count += 1, + } + } + insert_count - remove_count + } + + impl ConcreteLazyMapState { + fn assert_validation_accepted(&self) { + // Init the VP env from tx env in which we applied the map + // transitions + let tx_env = tx_host_env::take(); + vp_host_env::init_from_tx(self.address.clone(), tx_env, |_| {}); + + // Simulate a validity predicate run using the lazy map's validation + // helpers + let changed_keys = + vp_host_env::with(|env| env.all_touched_storage_keys()); + + let mut validation_builder = None; + + // Push followed by pop is a no-op, in which case we'd still see the + // changed keys for these actions, but they wouldn't affect the + // validation result and they never get persisted, but we'd still + // them as changed key here. To guard against this case, + // we check that `map_len_from_transitions` is not empty. + let map_len_diff = + map_len_diff_from_transitions(self.current_transitions.iter()); + + // To help debug validation issues... + dbg!( + &self.current_transitions, + &changed_keys + .iter() + .map(storage::Key::to_string) + .collect::>() + ); + + for key in &changed_keys { + let is_sub_key = self + .lazy_map + .accumulate( + vp_host_env::ctx(), + &mut validation_builder, + key, + ) + .unwrap(); + + assert!( + is_sub_key, + "We're only modifying the lazy_map's keys here. 
Key: \ + \"{key}\", map length diff {map_len_diff}" + ); + } + if !changed_keys.is_empty() && map_len_diff != 0 { + assert!( + validation_builder.is_some(), + "If some keys were changed, the builder must get filled in" + ); + let actions = LazyMap::::validate( + validation_builder.unwrap(), + ) + .unwrap(); + let mut actions_to_check = actions.clone(); + + // Check that every transition has a corresponding action from + // validation. We drop the found actions to check that all + // actions are matched too. + let current_transitions = + normalize_transitions(&self.current_transitions); + for transition in ¤t_transitions { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + } + Transition::Insert(expected_key, expected_val) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let lazy_map::Action::Insert(key, val) = + action + { + if expected_key == key + && expected_val == val + { + actions_to_check.remove(ix); + break; + } + } + } + } + Transition::Remove(expected_key) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let lazy_map::Action::Remove(key, _val) = + action + { + if expected_key == key { + actions_to_check.remove(ix); + break; + } + } + } + } + Transition::Update(expected_key, value) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let lazy_map::Action::Update { + key, + pre: _, + post, + } = action + { + if expected_key == key && post == value { + actions_to_check.remove(ix); + break; + } + } + } + } + } + } + + assert!( + actions_to_check.is_empty(), + "All the actions reported from validation {actions:#?} \ + should have been matched with SM transitions \ + {current_transitions:#?}, but these actions didn't \ + match: {actions_to_check:#?}", + ) + } + + // Put the tx_env back before checking the result + tx_host_env::set_from_vp_env(vp_host_env::take()); + } + } + + /// Generate an arbitrary `TestKey` + fn arb_map_key() -> impl Strategy { + any::() + } + + /// Generate an arbitrary `TestVal` + fn arb_map_val() -> impl Strategy { + (any::(), any::()).prop_map(|(x, y)| TestVal { x, y }) + } + + /// Apply `Transition` on an eager `Map`. + fn apply_transition_on_eager_map( + map: &mut BTreeMap, + transition: &Transition, + ) { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => {} + Transition::Insert(key, value) => { + map.insert(*key, value.clone()); + } + Transition::Remove(key) => { + let _popped = map.remove(key); + } + Transition::Update(key, value) => { + let entry = map.get_mut(key).unwrap(); + *entry = value.clone(); + } + } + } + + /// Normalize transitions: + /// - remove(key) + insert(key, val) -> update(key, val) + /// - insert(key, val) + update(key, new_val) -> insert(key, new_val) + /// - update(key, val) + update(key, new_val) -> update(key, new_val) + /// + /// Note that the normalizable transitions pairs do not have to be directly + /// next to each other, but their order does matter. 
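For concreteness, one possible trace of these collapse rules (the key `1` and the values `a` and `b` are made-up placeholders, not taken from an actual test run):

    // input transitions, in order: [Remove(1), Insert(1, a), Update(1, b)]
    //
    // Remove(1)    is pushed as-is             -> collapsed = [Remove(1)]
    // Insert(1, a) collapses the Remove(1)     -> collapsed = [Update(1, a)]
    // Update(1, b) collapses the Update(1, a)  -> collapsed = [Update(1, b)]
    //
    // output: [Update(1, b)]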
+ fn normalize_transitions(transitions: &[Transition]) -> Vec { + let mut collapsed = vec![]; + 'outer: for transition in transitions { + match transition { + Transition::CommitTx + | Transition::CommitTxAndBlock + | Transition::Remove(_) => collapsed.push(transition.clone()), + Transition::Insert(key, val) => { + for (ix, collapsed_transition) in + collapsed.iter().enumerate() + { + if let Transition::Remove(remove_key) = + collapsed_transition + { + if key == remove_key { + // remove(key) + insert(key, val) -> update(key, + // val) + + // Replace the Remove with an Update instead of + // inserting the Insert + *collapsed.get_mut(ix).unwrap() = + Transition::Update(*key, val.clone()); + continue 'outer; + } + } + } + collapsed.push(transition.clone()); + } + Transition::Update(key, value) => { + for (ix, collapsed_transition) in + collapsed.iter().enumerate() + { + if let Transition::Insert(insert_key, _) = + collapsed_transition + { + if key == insert_key { + // insert(key, val) + update(key, new_val) -> + // insert(key, new_val) + + // Replace the insert with the new update's + // value instead of inserting it + *collapsed.get_mut(ix).unwrap() = + Transition::Insert(*key, value.clone()); + continue 'outer; + } + } else if let Transition::Update(update_key, _) = + collapsed_transition + { + if key == update_key { + // update(key, val) + update(key, new_val) -> + // update(key, new_val) + + // Replace the insert with the new update's + // value instead of inserting it + *collapsed.get_mut(ix).unwrap() = + Transition::Update(*key, value.clone()); + continue 'outer; + } + } + } + collapsed.push(transition.clone()); + } + } + } + collapsed + } +} diff --git a/tests/src/storage_api/collections/lazy_vec.rs b/tests/src/storage_api/collections/lazy_vec.rs new file mode 100644 index 0000000000..65e08b4ca7 --- /dev/null +++ b/tests/src/storage_api/collections/lazy_vec.rs @@ -0,0 +1,634 @@ +#[cfg(test)] +mod tests { + use std::convert::TryInto; + + use borsh::{BorshDeserialize, BorshSerialize}; + use namada::types::address::{self, Address}; + use namada::types::storage; + use namada_tx_prelude::storage::KeySeg; + use namada_tx_prelude::storage_api::collections::{ + lazy_vec, LazyCollection, LazyVec, + }; + use proptest::prelude::*; + use proptest::prop_state_machine; + use proptest::state_machine::{AbstractStateMachine, StateMachineTest}; + use proptest::test_runner::Config; + use test_log::test; + + use crate::tx::tx_host_env; + use crate::vp::vp_host_env; + + prop_state_machine! { + #![proptest_config(Config { + // Instead of the default 256, we only run 5 because otherwise it + // takes too long and it's preferable to crank up the number of + // transitions instead, to allow each case to run for more epochs as + // some issues only manifest once the model progresses further. + // Additionally, more cases will be explored every time this test is + // executed in the CI. + cases: 5, + .. Config::default() + })] + #[test] + fn lazy_vec_api_state_machine_test(sequential 1..100 => ConcreteLazyVecState); + } + + /// Some borsh-serializable type with arbitrary fields to be used inside + /// LazyVec state machine test + #[derive( + Clone, + Debug, + BorshSerialize, + BorshDeserialize, + PartialEq, + Eq, + PartialOrd, + Ord, + )] + struct TestVecItem { + x: u64, + y: bool, + } + + /// A `StateMachineTest` implemented on this struct manipulates it with + /// `Transition`s, which are also being accumulated into + /// `current_transitions`. 
It then: + /// + /// - checks its state against an in-memory `std::collections::Vec` + /// - runs validation and checks that the `LazyVec::Action`s reported from + /// validation match with transitions that were applied + /// + /// Additionally, one of the transitions is to commit a block and/or + /// transaction, during which the currently accumulated state changes are + /// persisted, or promoted from transaction write log to block's write log. + #[derive(Debug)] + struct ConcreteLazyVecState { + /// Address is used to prefix the storage key of the `lazy_vec` in + /// order to simulate a transaction and a validity predicate + /// check from changes on the `lazy_vec` + address: Address, + /// In the test, we apply the same transitions on the `lazy_vec` as on + /// `eager_vec` to check that `lazy_vec`'s state is consistent with + /// `eager_vec`. + eager_vec: Vec, + /// Handle to a lazy vec + lazy_vec: LazyVec, + /// Valid LazyVec changes in the current transaction + current_transitions: Vec>, + } + + #[derive(Clone, Debug)] + struct AbstractLazyVecState { + /// Valid LazyVec changes in the current transaction + valid_transitions: Vec>, + /// Valid LazyVec changes committed to storage + committed_transitions: Vec>, + } + + /// Possible transitions that can modify a [`LazyVec`]. This roughly + /// corresponds to the methods that have `StorageWrite` access and is very + /// similar to [`Action`] + #[derive(Clone, Debug)] + pub enum Transition { + /// Commit all valid transitions in the current transaction + CommitTx, + /// Commit all valid transitions in the current transaction and also + /// commit the current block + CommitTxAndBlock, + /// Push a value `T` into a [`LazyVec`] + Push(T), + /// Pop a value from a [`LazyVec`] + Pop, + /// Update a value `T` at index from pre to post state in a + /// [`LazyVec`] + Update { + /// index at which the value is updated + index: lazy_vec::Index, + /// value to update the element to + value: T, + }, + } + + impl AbstractStateMachine for AbstractLazyVecState { + type State = Self; + type Transition = Transition; + + fn init_state() -> BoxedStrategy { + Just(Self { + valid_transitions: vec![], + committed_transitions: vec![], + }) + .boxed() + } + + // Apply a random transition to the state + fn transitions(state: &Self::State) -> BoxedStrategy { + let length = state.len(); + if length == 0 { + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => arb_test_vec_item().prop_map(Transition::Push) + ] + .boxed() + } else { + let arb_index = || { + let indices: Vec = (0..length).collect(); + proptest::sample::select(indices) + }; + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => (arb_index(), arb_test_vec_item()).prop_map( + |(index, value)| Transition::Update { index, value } + ), + 3 => Just(Transition::Pop), + 5 => arb_test_vec_item().prop_map(Transition::Push), + ] + .boxed() + } + } + + fn apply_abstract( + mut state: Self::State, + transition: &Self::Transition, + ) -> Self::State { + match transition { + Transition::CommitTx => { + let valid_actions_to_commit = + std::mem::take(&mut state.valid_transitions); + state + .committed_transitions + .extend(valid_actions_to_commit.into_iter()); + } + _ => state.valid_transitions.push(transition.clone()), + } + state + } + + fn preconditions( + state: &Self::State, + transition: &Self::Transition, + ) -> bool { + let length = state.len(); + if length == 0 { + // Ensure that the pop or update transitions are not 
applied to + // an empty state + !matches!( + transition, + Transition::Pop | Transition::Update { .. } + ) + } else if let Transition::Update { index, .. } = transition { + // Ensure that the update index is a valid one + *index < (length - 1) + } else { + true + } + } + } + + impl StateMachineTest for ConcreteLazyVecState { + type Abstract = AbstractLazyVecState; + type ConcreteState = Self; + + fn init_test( + _initial_state: ::State, + ) -> Self::ConcreteState { + // Init transaction env in which we'll be applying the transitions + tx_host_env::init(); + + // The lazy_vec's path must be prefixed by the address to be able + // to trigger a validity predicate on it + let address = address::testing::established_address_1(); + tx_host_env::with(|env| env.spawn_accounts([&address])); + let lazy_vec_prefix: storage::Key = address.to_db_key().into(); + + Self { + address, + eager_vec: vec![], + lazy_vec: LazyVec::open( + lazy_vec_prefix.push(&"arbitrary".to_string()).unwrap(), + ), + current_transitions: vec![], + } + } + + fn apply_concrete( + mut state: Self::ConcreteState, + transition: ::Transition, + ) -> Self::ConcreteState { + // Apply transitions in transaction env + let ctx = tx_host_env::ctx(); + + // Persist the transitions in the current tx, or clear previous ones + // if we're committing a tx + match &transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + state.current_transitions = vec![]; + } + _ => { + state.current_transitions.push(transition.clone()); + } + } + + // Transition application on lazy vec and post-conditions: + match &transition { + Transition::CommitTx => { + // commit the tx without committing the block + tx_host_env::with(|env| env.write_log.commit_tx()); + } + Transition::CommitTxAndBlock => { + // commit the tx and the block + tx_host_env::commit_tx_and_block(); + } + Transition::Push(value) => { + let old_len = state.lazy_vec.len(ctx).unwrap(); + + state.lazy_vec.push(ctx, value.clone()).unwrap(); + + // Post-conditions: + let new_len = state.lazy_vec.len(ctx).unwrap(); + let stored_value = + state.lazy_vec.get(ctx, new_len - 1).unwrap().unwrap(); + assert_eq!( + &stored_value, value, + "the new item must be added to the back" + ); + assert_eq!(old_len + 1, new_len, "length must increment"); + + state.assert_validation_accepted(new_len); + } + Transition::Pop => { + let old_len = state.lazy_vec.len(ctx).unwrap(); + + let popped = state.lazy_vec.pop(ctx).unwrap().unwrap(); + + // Post-conditions: + let new_len = state.lazy_vec.len(ctx).unwrap(); + assert_eq!(old_len, new_len + 1, "length must decrement"); + assert_eq!( + &popped, + state.eager_vec.last().unwrap(), + "popped element matches the last element in eager vec \ + before it's updated" + ); + + state.assert_validation_accepted(new_len); + } + Transition::Update { index, value } => { + let old_len = state.lazy_vec.len(ctx).unwrap(); + let old_val = + state.lazy_vec.get(ctx, *index).unwrap().unwrap(); + + state.lazy_vec.update(ctx, *index, value.clone()).unwrap(); + + // Post-conditions: + let new_len = state.lazy_vec.len(ctx).unwrap(); + let new_val = + state.lazy_vec.get(ctx, *index).unwrap().unwrap(); + assert_eq!(old_len, new_len, "length must not change"); + assert_eq!( + &old_val, + state.eager_vec.get(*index as usize).unwrap(), + "old value must match the value at the same index in \ + the eager vec before it's updated" + ); + assert_eq!( + &new_val, value, + "new value must match that which was passed into the \ + Transition::Update" + ); + + 
state.assert_validation_accepted(new_len); + } + } + + // Apply transition in the eager vec for comparison + apply_transition_on_eager_vec(&mut state.eager_vec, &transition); + + // Global post-conditions: + + // All items in eager vec must be present in lazy vec + for (ix, expected_item) in state.eager_vec.iter().enumerate() { + let got = state + .lazy_vec + .get(ctx, ix as lazy_vec::Index) + .unwrap() + .expect("The expected item must be present in lazy vec"); + assert_eq!(expected_item, &got, "at index {ix}"); + } + + // All items in lazy vec must be present in eager vec + for (ix, expected_item) in + state.lazy_vec.iter(ctx).unwrap().enumerate() + { + let expected_item = expected_item.unwrap(); + let got = state + .eager_vec + .get(ix) + .expect("The expected item must be present in eager vec"); + assert_eq!(&expected_item, got, "at index {ix}"); + } + + state + } + } + + impl AbstractLazyVecState { + /// Find the length of the vector from the applied transitions + fn len(&self) -> u64 { + (vec_len_diff_from_transitions(self.committed_transitions.iter()) + + vec_len_diff_from_transitions(self.valid_transitions.iter())) + .try_into() + .expect( + "It shouldn't be possible to underflow length from all \ + transactions applied in abstract state", + ) + } + } + + /// Find the difference in length of the vector from the applied transitions + fn vec_len_diff_from_transitions<'a>( + all_transitions: impl Iterator>, + ) -> i64 { + let mut push_count: i64 = 0; + let mut pop_count: i64 = 0; + + for trans in all_transitions { + match trans { + Transition::CommitTx + | Transition::CommitTxAndBlock + | Transition::Update { .. } => {} + Transition::Push(_) => push_count += 1, + Transition::Pop => pop_count += 1, + } + } + push_count - pop_count + } + + impl ConcreteLazyVecState { + fn assert_validation_accepted(&self, new_vec_len: u64) { + // Init the VP env from tx env in which we applied the vec + // transitions + let tx_env = tx_host_env::take(); + vp_host_env::init_from_tx(self.address.clone(), tx_env, |_| {}); + + // Simulate a validity predicate run using the lazy vec's validation + // helpers + let changed_keys = + vp_host_env::with(|env| env.all_touched_storage_keys()); + + let mut validation_builder = None; + + // Push followed by pop is a no-op, in which case we'd still see the + // changed keys for these actions, but they wouldn't affect the + // validation result and they never get persisted, but we'd still + // them as changed key here. To guard against this case, + // we check that `vec_len_from_transitions` is not empty. + let vec_len_diff = + vec_len_diff_from_transitions(self.current_transitions.iter()); + + // To help debug validation issues... + dbg!( + &self.current_transitions, + &changed_keys + .iter() + .map(storage::Key::to_string) + .collect::>() + ); + + for key in &changed_keys { + let is_sub_key = self + .lazy_vec + .accumulate( + vp_host_env::ctx(), + &mut validation_builder, + key, + ) + .unwrap(); + + assert!( + is_sub_key, + "We're only modifying the lazy_vec's keys here. 
Key: \ + \"{key}\", vec length diff {vec_len_diff}" + ); + } + if !changed_keys.is_empty() && vec_len_diff != 0 { + assert!( + validation_builder.is_some(), + "If some keys were changed, the builder must get filled in" + ); + let actions = LazyVec::::validate( + validation_builder.unwrap(), + ) + .expect( + "With valid transitions only, validation should always \ + pass", + ); + let mut actions_to_check = actions.clone(); + + // Check that every transition has a corresponding action from + // validation. We drop the found actions to check that all + // actions are matched too. + let current_transitions = normalize_transitions( + &self.current_transitions, + new_vec_len, + ); + for transition in ¤t_transitions { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + } + Transition::Push(expected_val) => { + let mut ix = 0; + while ix < actions_to_check.len() { + if let lazy_vec::Action::Push(val) = + &actions_to_check[ix] + { + if expected_val == val { + actions_to_check.remove(ix); + break; + } + } + ix += 1; + } + } + Transition::Pop => { + let mut ix = 0; + while ix < actions_to_check.len() { + if let lazy_vec::Action::Pop(_val) = + &actions_to_check[ix] + { + actions_to_check.remove(ix); + break; + } + ix += 1; + } + } + Transition::Update { + index: expected_index, + value, + } => { + let mut ix = 0; + while ix < actions_to_check.len() { + if let lazy_vec::Action::Update { + index, + pre: _, + post, + } = &actions_to_check[ix] + { + if expected_index == index && post == value + { + actions_to_check.remove(ix); + break; + } + } + ix += 1; + } + } + } + } + + assert!( + actions_to_check.is_empty(), + "All the actions reported from validation {actions:#?} \ + should have been matched with SM transitions \ + {current_transitions:#?}, but these actions didn't \ + match: {actions_to_check:#?}", + ) + } + + // Put the tx_env back before checking the result + tx_host_env::set_from_vp_env(vp_host_env::take()); + } + } + + /// Generate an arbitrary `TestVecItem` + fn arb_test_vec_item() -> impl Strategy { + (any::(), any::()).prop_map(|(x, y)| TestVecItem { x, y }) + } + + /// Apply `Transition` on an eager `Vec`. + fn apply_transition_on_eager_vec( + vec: &mut Vec, + transition: &Transition, + ) { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => {} + Transition::Push(value) => vec.push(value.clone()), + Transition::Pop => { + let _popped = vec.pop(); + } + Transition::Update { index, value } => { + let entry = vec.get_mut(*index as usize).unwrap(); + *entry = value.clone(); + } + } + } + + /// Normalize transitions: + /// - pop at ix + push(val) at ix -> update(ix, val) + /// - push(val) at ix + update(ix, new_val) -> push(new_val) at ix + /// - update(ix, val) + update(ix, new_val) -> update(ix, new_val) + /// + /// Note that the normalizable transitions pairs do not have to be directly + /// next to each other, but their order does matter. 
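As a worked illustration of these rules (all literals are placeholders; assume the vector already holds one committed element, so `new_vec_len` is 2 after the transitions below):

    // current transitions, in order: [Push(a), Update { index: 1, value: b }]
    //
    // stack_start_pos = new_vec_len - len_diff = 2 - 1 = 1
    // Push(a)  lands at index 1, is pushed as-is  -> collapsed = [Push(a)]
    // Update { index: 1, value: b } collapses the
    //   Push(a) sitting at the same index          -> collapsed = [Push(b)]
    //
    // output: [Push(b)]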
+ fn normalize_transitions( + transitions: &[Transition], + new_vec_len: u64, + ) -> Vec> { + let stack_start_pos = ((new_vec_len as i64) + - vec_len_diff_from_transitions(transitions.iter())) + as u64; + let mut stack_pos = stack_start_pos; + let mut collapsed = vec![]; + 'outer: for transition in transitions { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + collapsed.push(transition.clone()) + } + Transition::Push(value) => { + // If there are some pops, the last one can be collapsed + // with this push + if stack_pos < stack_start_pos { + // Find the pop from the back + let mut found_ix = None; + for (ix, transition) in + collapsed.iter().enumerate().rev() + { + if let Transition::Pop = transition { + found_ix = Some(ix); + break; + } + } + let ix = found_ix.expect("Pop must be found"); + // pop at ix + push(val) at ix -> update(ix, val) + + // Replace the Pop with an Update and don't insert the + // Push + *collapsed.get_mut(ix).unwrap() = Transition::Update { + index: stack_pos, + value: value.clone(), + }; + } else { + collapsed.push(transition.clone()); + } + stack_pos += 1; + } + Transition::Pop => { + collapsed.push(transition.clone()); + stack_pos -= 1; + } + Transition::Update { index, value } => { + // If there are some pushes, check if one of them is at the + // same index as this update + if stack_pos > stack_start_pos { + let mut current_pos = stack_start_pos; + for (ix, collapsed_transition) in + collapsed.iter().enumerate() + { + match collapsed_transition { + Transition::CommitTx + | Transition::CommitTxAndBlock => {} + Transition::Push(_) => { + if ¤t_pos == index { + // push(val) at `ix` + update(ix, + // new_val) -> + // push(new_val) at `ix` + + // Replace the Push with the new Push of + // Update's + // value and don't insert the Update + *collapsed.get_mut(ix).unwrap() = + Transition::Push(value.clone()); + continue 'outer; + } + current_pos += 1; + } + Transition::Pop => { + current_pos -= 1; + } + Transition::Update { + index: prev_update_index, + value: _, + } => { + if index == prev_update_index { + // update(ix, val) + update(ix, new_val) + // -> update(ix, new_val) + + // Replace the Update with the new + // Update instead of inserting it + *collapsed.get_mut(ix).unwrap() = + transition.clone(); + continue 'outer; + } + } + } + } + } + collapsed.push(transition.clone()) + } + } + } + collapsed + } +} diff --git a/tests/src/storage_api/collections/mod.rs b/tests/src/storage_api/collections/mod.rs new file mode 100644 index 0000000000..f39b880c09 --- /dev/null +++ b/tests/src/storage_api/collections/mod.rs @@ -0,0 +1,3 @@ +mod lazy_map; +mod lazy_vec; +mod nested_lazy_map; diff --git a/tests/src/storage_api/collections/nested_lazy_map.rs b/tests/src/storage_api/collections/nested_lazy_map.rs new file mode 100644 index 0000000000..037decce46 --- /dev/null +++ b/tests/src/storage_api/collections/nested_lazy_map.rs @@ -0,0 +1,723 @@ +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + use std::convert::TryInto; + + use borsh::{BorshDeserialize, BorshSerialize}; + use namada::types::address::{self, Address}; + use namada::types::storage; + use namada_tx_prelude::storage::KeySeg; + use namada_tx_prelude::storage_api::collections::lazy_map::{ + NestedMap, NestedSubKey, SubKey, + }; + use namada_tx_prelude::storage_api::collections::{ + lazy_map, LazyCollection, LazyMap, + }; + use proptest::prelude::*; + use proptest::prop_state_machine; + use proptest::state_machine::{AbstractStateMachine, StateMachineTest}; + use 
proptest::test_runner::Config; + use test_log::test; + + use crate::tx::tx_host_env; + use crate::vp::vp_host_env; + + prop_state_machine! { + #![proptest_config(Config { + // Instead of the default 256, we only run 5 because otherwise it + // takes too long and it's preferable to crank up the number of + // transitions instead, to allow each case to run for more epochs as + // some issues only manifest once the model progresses further. + // Additionally, more cases will be explored every time this test is + // executed in the CI. + cases: 5, + .. Config::default() + })] + #[test] + fn nested_lazy_map_api_state_machine_test(sequential 1..100 => ConcreteLazyMapState); + } + + /// Some borsh-serializable type with arbitrary fields to be used inside + /// LazyMap state machine test + #[derive( + Clone, + Debug, + BorshSerialize, + BorshDeserialize, + PartialEq, + Eq, + PartialOrd, + Ord, + )] + struct TestVal { + x: u64, + y: bool, + } + + type KeyOuter = u64; + type KeyMiddle = i32; + type KeyInner = i8; + + type NestedTestMap = + NestedMap>>; + + type NestedEagerMap = + BTreeMap>>; + + /// A `StateMachineTest` implemented on this struct manipulates it with + /// `Transition`s, which are also being accumulated into + /// `current_transitions`. It then: + /// + /// - checks its state against an in-memory `std::collections::HashMap` + /// - runs validation and checks that the `LazyMap::Action`s reported from + /// validation match with transitions that were applied + /// + /// Additionally, one of the transitions is to commit a block and/or + /// transaction, during which the currently accumulated state changes are + /// persisted, or promoted from transaction write log to block's write log. + #[derive(Debug)] + struct ConcreteLazyMapState { + /// Address is used to prefix the storage key of the `lazy_map` in + /// order to simulate a transaction and a validity predicate + /// check from changes on the `lazy_map` + address: Address, + /// In the test, we apply the same transitions on the `lazy_map` as on + /// `eager_map` to check that `lazy_map`'s state is consistent with + /// `eager_map`. + eager_map: NestedEagerMap, + /// Handle to a lazy map with nested lazy collections + lazy_map: NestedTestMap, + /// Valid LazyMap changes in the current transaction + current_transitions: Vec, + } + + #[derive(Clone, Debug, Default)] + struct AbstractLazyMapState { + /// Valid LazyMap changes in the current transaction + valid_transitions: Vec, + /// Valid LazyMap changes committed to storage + committed_transitions: Vec, + } + + /// Possible transitions that can modify a [`NestedTestMap`]. 
+ /// This roughly corresponds to the methods that have `StorageWrite` + /// access and is very similar to [`Action`] + #[derive(Clone, Debug)] + enum Transition { + /// Commit all valid transitions in the current transaction + CommitTx, + /// Commit all valid transitions in the current transaction and also + /// commit the current block + CommitTxAndBlock, + /// Insert a key-val into a [`LazyMap`] + Insert(Key, TestVal), + /// Remove a key-val from a [`LazyMap`] + Remove(Key), + /// Update a value at key from pre to post state in a + /// [`LazyMap`] + Update(Key, TestVal), + } + + /// A key for transition + type Key = (KeyOuter, KeyMiddle, KeyInner); + + impl AbstractStateMachine for AbstractLazyMapState { + type State = Self; + type Transition = Transition; + + fn init_state() -> BoxedStrategy { + Just(Self::default()).boxed() + } + + // Apply a random transition to the state + fn transitions(state: &Self::State) -> BoxedStrategy { + let length = state.len(); + if length == 0 { + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => (arb_map_key(), arb_map_val()).prop_map(|(key, val)| Transition::Insert(key, val)) + ] + .boxed() + } else { + let keys = state.find_existing_keys(); + let arb_existing_map_key = + || proptest::sample::select(keys.clone()); + prop_oneof![ + 1 => Just(Transition::CommitTx), + 1 => Just(Transition::CommitTxAndBlock), + 3 => (arb_existing_map_key(), arb_map_val()).prop_map(|(key, val)| + Transition::Update(key, val)), + 3 => arb_existing_map_key().prop_map(Transition::Remove), + 5 => (arb_map_key().prop_filter( + "insert on non-existing keys only", + move |key| !keys.contains(key)), arb_map_val()) + .prop_map(|(key, val)| Transition::Insert(key, val)) + ] + .boxed() + } + } + + fn apply_abstract( + mut state: Self::State, + transition: &Self::Transition, + ) -> Self::State { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + let valid_actions_to_commit = + std::mem::take(&mut state.valid_transitions); + state + .committed_transitions + .extend(valid_actions_to_commit.into_iter()); + } + _ => state.valid_transitions.push(transition.clone()), + } + state + } + + fn preconditions( + state: &Self::State, + transition: &Self::Transition, + ) -> bool { + let length = state.len(); + // Ensure that the remove or update transitions are not applied + // to an empty state + if length == 0 + && matches!( + transition, + Transition::Remove(_) | Transition::Update(_, _) + ) + { + return false; + } + match transition { + Transition::Update(key, _) | Transition::Remove(key) => { + let keys = state.find_existing_keys(); + // Ensure that the update/remove key is an existing one + keys.contains(key) + } + Transition::Insert(key, _) => { + let keys = state.find_existing_keys(); + // Ensure that the insert key is not an existing one + !keys.contains(key) + } + _ => true, + } + } + } + + impl StateMachineTest for ConcreteLazyMapState { + type Abstract = AbstractLazyMapState; + type ConcreteState = Self; + + fn init_test( + _initial_state: ::State, + ) -> Self::ConcreteState { + // Init transaction env in which we'll be applying the transitions + tx_host_env::init(); + + // The lazy_map's path must be prefixed by the address to be able + // to trigger a validity predicate on it + let address = address::testing::established_address_1(); + tx_host_env::with(|env| env.spawn_accounts([&address])); + let lazy_map_prefix: storage::Key = address.to_db_key().into(); + + Self { + address, + eager_map: BTreeMap::new(), + 
lazy_map: NestedTestMap::open( + lazy_map_prefix.push(&"arbitrary".to_string()).unwrap(), + ), + current_transitions: vec![], + } + } + + fn apply_concrete( + mut state: Self::ConcreteState, + transition: ::Transition, + ) -> Self::ConcreteState { + // Apply transitions in transaction env + let ctx = tx_host_env::ctx(); + + // Persist the transitions in the current tx, or clear previous ones + // if we're committing a tx + match &transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + state.current_transitions = vec![]; + } + _ => { + state.current_transitions.push(transition.clone()); + } + } + + // Transition application on lazy map and post-conditions: + match &transition { + Transition::CommitTx => { + // commit the tx without committing the block + tx_host_env::with(|env| env.write_log.commit_tx()); + } + Transition::CommitTxAndBlock => { + // commit the tx and the block + tx_host_env::commit_tx_and_block(); + } + Transition::Insert( + (key_outer, key_middle, key_inner), + value, + ) => { + let inner = state.lazy_map.at(key_outer).at(key_middle); + + inner.insert(ctx, *key_inner, value.clone()).unwrap(); + + // Post-conditions: + let stored_value = + inner.get(ctx, key_inner).unwrap().unwrap(); + assert_eq!( + &stored_value, value, + "the new item must be added to the back" + ); + + state.assert_validation_accepted(); + } + Transition::Remove((key_outer, key_middle, key_inner)) => { + let inner = state.lazy_map.at(key_outer).at(key_middle); + + let removed = + inner.remove(ctx, key_inner).unwrap().unwrap(); + + // Post-conditions: + assert_eq!( + &removed, + state + .eager_map + .get(key_outer) + .unwrap() + .get(key_middle) + .unwrap() + .get(key_inner) + .unwrap(), + "removed element matches the value in eager map \ + before it's updated" + ); + + state.assert_validation_accepted(); + } + Transition::Update( + (key_outer, key_middle, key_inner), + value, + ) => { + let inner = state.lazy_map.at(key_outer).at(key_middle); + + let old_val = inner.get(ctx, key_inner).unwrap().unwrap(); + + inner.insert(ctx, *key_inner, value.clone()).unwrap(); + + // Post-conditions: + let new_val = inner.get(ctx, key_inner).unwrap().unwrap(); + assert_eq!( + &old_val, + state + .eager_map + .get(key_outer) + .unwrap() + .get(key_middle) + .unwrap() + .get(key_inner) + .unwrap(), + "old value must match the value at the same key in \ + the eager map before it's updated" + ); + assert_eq!( + &new_val, value, + "new value must match that which was passed into the \ + Transition::Update" + ); + + state.assert_validation_accepted(); + } + } + + // Apply transition in the eager map for comparison + apply_transition_on_eager_map(&mut state.eager_map, &transition); + + // Global post-conditions: + + // All items in eager map must be present in lazy map + for (key_outer, middle) in state.eager_map.iter() { + for (key_middle, inner) in middle { + for (key_inner, expected_item) in inner { + let got = state + .lazy_map + .at(key_outer) + .at(key_middle) + .get(ctx, key_inner) + .unwrap() + .expect( + "The expected item must be present in lazy map", + ); + assert_eq!( + expected_item, &got, + "at key {key_outer}, {key_middle} {key_inner}" + ); + } + } + } + + // All items in lazy map must be present in eager map + for key_val in state.lazy_map.iter(ctx).unwrap() { + let ( + NestedSubKey::Data { + key: key_outer, + nested_sub_key: + NestedSubKey::Data { + key: key_middle, + nested_sub_key: SubKey::Data(key_inner), + }, + }, + expected_val, + ) = key_val.unwrap(); + let got = state + .eager_map + 
.get(&key_outer) + .unwrap() + .get(&key_middle) + .unwrap() + .get(&key_inner) + .expect("The expected item must be present in eager map"); + assert_eq!( + &expected_val, got, + "at key {key_outer}, {key_middle} {key_inner})" + ); + } + + state + } + } + + impl AbstractLazyMapState { + /// Find the length of the map from the applied transitions + fn len(&self) -> u64 { + (map_len_diff_from_transitions(self.committed_transitions.iter()) + + map_len_diff_from_transitions(self.valid_transitions.iter())) + .try_into() + .expect( + "It shouldn't be possible to underflow length from all \ + transactions applied in abstract state", + ) + } + + /// Build an eager map from the committed and current transitions + fn eager_map(&self) -> NestedEagerMap { + let mut eager_map = BTreeMap::new(); + for transition in &self.committed_transitions { + apply_transition_on_eager_map(&mut eager_map, transition); + } + for transition in &self.valid_transitions { + apply_transition_on_eager_map(&mut eager_map, transition); + } + eager_map + } + + /// Find the keys currently present in the map + fn find_existing_keys(&self) -> Vec { + let outer_map = self.eager_map(); + outer_map + .into_iter() + .fold(vec![], |acc, (outer, middle_map)| { + middle_map.into_iter().fold( + acc, + |mut acc, (middle, inner_map)| { + acc.extend( + inner_map + .into_iter() + .map(|(inner, _)| (outer, middle, inner)), + ); + acc + }, + ) + }) + } + } + + /// Find the difference in length of the map from the applied transitions + fn map_len_diff_from_transitions<'a>( + transitions: impl Iterator, + ) -> i64 { + let mut insert_count: i64 = 0; + let mut remove_count: i64 = 0; + + for trans in transitions { + match trans { + Transition::CommitTx + | Transition::CommitTxAndBlock + | Transition::Update(_, _) => {} + Transition::Insert(_, _) => insert_count += 1, + Transition::Remove(_) => remove_count += 1, + } + } + insert_count - remove_count + } + + impl ConcreteLazyMapState { + fn assert_validation_accepted(&self) { + // Init the VP env from tx env in which we applied the map + // transitions + let tx_env = tx_host_env::take(); + vp_host_env::init_from_tx(self.address.clone(), tx_env, |_| {}); + + // Simulate a validity predicate run using the lazy map's validation + // helpers + let changed_keys = + vp_host_env::with(|env| env.all_touched_storage_keys()); + + let mut validation_builder = None; + + // Push followed by pop is a no-op, in which case we'd still see the + // changed keys for these actions, but they wouldn't affect the + // validation result and they never get persisted, but we'd still + // them as changed key here. To guard against this case, + // we check that `map_len_from_transitions` is not empty. + let map_len_diff = + map_len_diff_from_transitions(self.current_transitions.iter()); + + // To help debug validation issues... + dbg!( + &self.current_transitions, + &changed_keys + .iter() + .map(storage::Key::to_string) + .collect::>() + ); + + for key in &changed_keys { + let is_sub_key = self + .lazy_map + .accumulate( + vp_host_env::ctx(), + &mut validation_builder, + key, + ) + .unwrap(); + + assert!( + is_sub_key, + "We're only modifying the lazy_map's keys here. 
Key: \ + \"{key}\", map length diff {map_len_diff}" + ); + } + if !changed_keys.is_empty() && map_len_diff != 0 { + assert!( + validation_builder.is_some(), + "If some keys were changed, the builder must get filled in" + ); + let actions = + NestedTestMap::validate(validation_builder.unwrap()) + .unwrap(); + let mut actions_to_check = actions.clone(); + + // Check that every transition has a corresponding action from + // validation. We drop the found actions to check that all + // actions are matched too. + let current_transitions = + normalize_transitions(&self.current_transitions); + for transition in ¤t_transitions { + use lazy_map::Action; + use lazy_map::NestedAction::At; + + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => { + } + Transition::Insert(expected_key, expected_val) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let At( + key_outer, + At( + key_middle, + Action::Insert(key_inner, val), + ), + ) = action + { + let key = + (*key_outer, *key_middle, *key_inner); + if expected_key == &key + && expected_val == val + { + actions_to_check.remove(ix); + break; + } + } + } + } + Transition::Remove(expected_key) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let At( + key_outer, + At( + key_middle, + Action::Remove(key_inner, _val), + ), + ) = action + { + let key = + (*key_outer, *key_middle, *key_inner); + if expected_key == &key { + actions_to_check.remove(ix); + break; + } + } + } + } + Transition::Update(expected_key, value) => { + for (ix, action) in + actions_to_check.iter().enumerate() + { + if let At( + key_outer, + At( + key_middle, + Action::Update { + key: key_inner, + pre: _, + post, + }, + ), + ) = action + { + let key = + (*key_outer, *key_middle, *key_inner); + if expected_key == &key && post == value { + actions_to_check.remove(ix); + break; + } + } + } + } + } + } + + assert!( + actions_to_check.is_empty(), + "All the actions reported from validation {actions:#?} \ + should have been matched with SM transitions \ + {current_transitions:#?}, but these actions didn't \ + match: {actions_to_check:#?}", + ) + } + + // Put the tx_env back before checking the result + tx_host_env::set_from_vp_env(vp_host_env::take()); + } + } + + /// Generate an arbitrary `TestKey` + fn arb_map_key() -> impl Strategy { + (any::(), any::(), any::()) + } + + /// Generate an arbitrary `TestVal` + fn arb_map_val() -> impl Strategy { + (any::(), any::()).prop_map(|(x, y)| TestVal { x, y }) + } + + /// Apply `Transition` on an eager `Map`. 
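The eager counterpart of the nested lazy map is a plain three-level `BTreeMap`. Below is a minimal standalone sketch of the `Insert`/`Remove` handling on such a map; the generic parameters are reconstructed from how the map is used (angle-bracketed types were lost in this rendering of the diff), and the key and value literals are made up for illustration:

    use std::collections::BTreeMap;

    // (u64, i32, i8) mirrors (KeyOuter, KeyMiddle, KeyInner) above
    type Nested<V> = BTreeMap<u64, BTreeMap<i32, BTreeMap<i8, V>>>;

    fn main() {
        let mut map: Nested<&str> = BTreeMap::new();

        // Insert((2, -3, 7), "val"): create the middle and inner maps on demand
        map.entry(2)
            .or_insert_with(BTreeMap::new)
            .entry(-3)
            .or_insert_with(BTreeMap::new)
            .insert(7, "val");

        // Remove((2, -3, 7)): drill down the same way, then remove the inner entry
        let removed = map
            .entry(2)
            .or_insert_with(BTreeMap::new)
            .entry(-3)
            .or_insert_with(BTreeMap::new)
            .remove(&7);
        assert_eq!(removed, Some("val"));
        assert!(map.get(&2).unwrap().get(&-3).unwrap().is_empty());
    }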
+ fn apply_transition_on_eager_map( + map: &mut NestedEagerMap, + transition: &Transition, + ) { + match transition { + Transition::CommitTx | Transition::CommitTxAndBlock => {} + Transition::Insert((key_outer, key_middle, key_inner), value) + | Transition::Update((key_outer, key_middle, key_inner), value) => { + let middle = + map.entry(*key_outer).or_insert_with(Default::default); + let inner = + middle.entry(*key_middle).or_insert_with(Default::default); + inner.insert(*key_inner, value.clone()); + } + Transition::Remove((key_outer, key_middle, key_inner)) => { + let middle = + map.entry(*key_outer).or_insert_with(Default::default); + let inner = + middle.entry(*key_middle).or_insert_with(Default::default); + let _popped = inner.remove(key_inner); + } + } + } + + /// Normalize transitions: + /// - remove(key) + insert(key, val) -> update(key, val) + /// - insert(key, val) + update(key, new_val) -> insert(key, new_val) + /// - update(key, val) + update(key, new_val) -> update(key, new_val) + /// + /// Note that the normalizable transitions pairs do not have to be directly + /// next to each other, but their order does matter. + fn normalize_transitions(transitions: &[Transition]) -> Vec { + let mut collapsed = vec![]; + 'outer: for transition in transitions { + match transition { + Transition::CommitTx + | Transition::CommitTxAndBlock + | Transition::Remove(_) => collapsed.push(transition.clone()), + Transition::Insert(key, val) => { + for (ix, collapsed_transition) in + collapsed.iter().enumerate() + { + if let Transition::Remove(remove_key) = + collapsed_transition + { + if key == remove_key { + // remove(key) + insert(key, val) -> update(key, + // val) + + // Replace the Remove with an Update instead of + // inserting the Insert + *collapsed.get_mut(ix).unwrap() = + Transition::Update(*key, val.clone()); + continue 'outer; + } + } + } + collapsed.push(transition.clone()); + } + Transition::Update(key, value) => { + for (ix, collapsed_transition) in + collapsed.iter().enumerate() + { + if let Transition::Insert(insert_key, _) = + collapsed_transition + { + if key == insert_key { + // insert(key, val) + update(key, new_val) -> + // insert(key, new_val) + + // Replace the insert with the new update's + // value instead of inserting it + *collapsed.get_mut(ix).unwrap() = + Transition::Insert(*key, value.clone()); + continue 'outer; + } + } else if let Transition::Update(update_key, _) = + collapsed_transition + { + if key == update_key { + // update(key, val) + update(key, new_val) -> + // update(key, new_val) + + // Replace the insert with the new update's + // value instead of inserting it + *collapsed.get_mut(ix).unwrap() = + Transition::Update(*key, value.clone()); + continue 'outer; + } + } + } + collapsed.push(transition.clone()); + } + } + } + collapsed + } +} diff --git a/tests/src/storage_api/mod.rs b/tests/src/storage_api/mod.rs new file mode 100644 index 0000000000..bc487bd59e --- /dev/null +++ b/tests/src/storage_api/mod.rs @@ -0,0 +1 @@ +mod collections; diff --git a/tests/src/vm_host_env/ibc.rs b/tests/src/vm_host_env/ibc.rs index 8e6fa0a254..5e45fdc3ad 100644 --- a/tests/src/vm_host_env/ibc.rs +++ b/tests/src/vm_host_env/ibc.rs @@ -1,5 +1,5 @@ use core::time::Duration; -use std::collections::{BTreeSet, HashMap}; +use std::collections::HashMap; use std::str::FromStr; use namada::ibc::applications::ics20_fungible_token_transfer::msgs::transfer::MsgTransfer; @@ -60,24 +60,23 @@ use namada::ledger::ibc::vp::{ use namada::ledger::native_vp::{Ctx, NativeVp}; use 
namada::ledger::storage::mockdb::MockDB; use namada::ledger::storage::traits::Sha256Hasher; +use namada::ledger::tx_env::TxEnv; use namada::proto::Tx; use namada::tendermint_proto::Protobuf; use namada::types::address::{self, Address, InternalAddress}; use namada::types::ibc::data::FungibleTokenPacketData; -use namada::types::ibc::IbcEvent; -use namada::types::storage::{BlockHash, BlockHeight, Key}; -use namada::types::time::Rfc3339String; +use namada::types::storage::{self, BlockHash, BlockHeight}; use namada::types::token::{self, Amount}; use namada::vm::{wasm, WasmCacheRwAccess}; -use tempfile::TempDir; +use namada_tx_prelude::StorageWrite; -use crate::tx::*; +use crate::tx::{self, *}; const VP_ALWAYS_TRUE_WASM: &str = "../wasm_for_tests/vp_always_true.wasm"; +const ADDRESS: Address = Address::Internal(InternalAddress::Ibc); pub struct TestIbcVp<'a> { pub ibc: Ibc<'a, MockDB, Sha256Hasher, WasmCacheRwAccess>, - pub keys_changed: BTreeSet, } impl<'a> TestIbcVp<'a> { @@ -85,14 +84,16 @@ impl<'a> TestIbcVp<'a> { &self, tx_data: &[u8], ) -> std::result::Result { - self.ibc - .validate_tx(tx_data, &self.keys_changed, &BTreeSet::new()) + self.ibc.validate_tx( + tx_data, + self.ibc.ctx.keys_changed, + self.ibc.ctx.verifiers, + ) } } pub struct TestIbcTokenVp<'a> { pub token: IbcToken<'a, MockDB, Sha256Hasher, WasmCacheRwAccess>, - pub keys_changed: BTreeSet, } impl<'a> TestIbcTokenVp<'a> { @@ -100,82 +101,19 @@ impl<'a> TestIbcTokenVp<'a> { &self, tx_data: &[u8], ) -> std::result::Result { - self.token - .validate_tx(tx_data, &self.keys_changed, &BTreeSet::new()) + self.token.validate_tx( + tx_data, + self.token.ctx.keys_changed, + self.token.ctx.verifiers, + ) } } -pub struct TestIbcActions; - -impl IbcActions for TestIbcActions { - /// Read IBC-related data - fn read_ibc_data(&self, key: &Key) -> Option> { - tx_host_env::read_bytes(key.to_string()) - } - - /// Write IBC-related data - fn write_ibc_data(&self, key: &Key, data: impl AsRef<[u8]>) { - tx_host_env::write_bytes(key.to_string(), data) - } - - /// Delete IBC-related data - fn delete_ibc_data(&self, key: &Key) { - tx_host_env::delete(key.to_string()) - } - - /// Emit an IBC event - fn emit_ibc_event(&self, event: IbcEvent) { - tx_host_env::emit_ibc_event(&event) - } - - fn transfer_token( - &self, - src: &Address, - dest: &Address, - token: &Address, - amount: Amount, - ) { - let src_key = token::balance_key(token, src); - let dest_key = token::balance_key(token, dest); - let src_bal: Option = tx_host_env::read(&src_key.to_string()); - let mut src_bal = src_bal.unwrap_or_else(|| match src { - Address::Internal(InternalAddress::IbcMint) => Amount::max(), - _ => unreachable!(), - }); - src_bal.spend(&amount); - let mut dest_bal: Amount = - tx_host_env::read(&dest_key.to_string()).unwrap_or_default(); - dest_bal.receive(&amount); - match src { - Address::Internal(InternalAddress::IbcMint) => { - tx_host_env::write_temp(&src_key.to_string(), src_bal) - } - Address::Internal(InternalAddress::IbcBurn) => unreachable!(), - _ => tx_host_env::write(&src_key.to_string(), src_bal), - } - match dest { - Address::Internal(InternalAddress::IbcMint) => unreachable!(), - Address::Internal(InternalAddress::IbcBurn) => { - tx_host_env::write_temp(&dest_key.to_string(), dest_bal) - } - _ => tx_host_env::write(&dest_key.to_string(), dest_bal), - } - } - - fn get_height(&self) -> BlockHeight { - tx_host_env::get_block_height() - } - - fn get_header_time(&self) -> Rfc3339String { - tx_host_env::get_block_time() - } -} - -/// Initialize IBC VP by running a 
transaction. -pub fn init_ibc_vp_from_tx<'a>( +/// Validate an IBC transaction with IBC VP. +pub fn validate_ibc_vp_from_tx<'a>( tx_env: &'a TestTxEnv, tx: &'a Tx, -) -> (TestIbcVp<'a>, TempDir) { +) -> std::result::Result { let (verifiers, keys_changed) = tx_env .write_log .verifiers_and_changed_keys(&tx_env.verifiers); @@ -186,27 +124,30 @@ pub fn init_ibc_vp_from_tx<'a>( addr, verifiers ); } - let (vp_wasm_cache, vp_cache_dir) = + let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); let ctx = Ctx::new( + &ADDRESS, &tx_env.storage, &tx_env.write_log, tx, VpGasMeter::new(0), + &keys_changed, + &verifiers, vp_wasm_cache, ); let ibc = Ibc { ctx }; - (TestIbcVp { ibc, keys_changed }, vp_cache_dir) + TestIbcVp { ibc }.validate(tx.data.as_ref().unwrap()) } -/// Initialize the native token VP for the given address -pub fn init_token_vp_from_tx<'a>( +/// Validate the native token VP for the given address +pub fn validate_token_vp_from_tx<'a>( tx_env: &'a TestTxEnv, tx: &'a Tx, addr: &Address, -) -> (TestIbcTokenVp<'a>, TempDir) { +) -> std::result::Result { let (verifiers, keys_changed) = tx_env .write_log .verifiers_and_changed_keys(&tx_env.verifiers); @@ -217,26 +158,57 @@ pub fn init_token_vp_from_tx<'a>( addr, verifiers ); } - let (vp_wasm_cache, vp_cache_dir) = + let (vp_wasm_cache, _vp_cache_dir) = wasm::compilation_cache::common::testing::cache(); let ctx = Ctx::new( + &ADDRESS, &tx_env.storage, &tx_env.write_log, tx, VpGasMeter::new(0), + &keys_changed, + &verifiers, vp_wasm_cache, ); let token = IbcToken { ctx }; - ( - TestIbcTokenVp { - token, - keys_changed, - }, - vp_cache_dir, - ) -} + TestIbcTokenVp { token }.validate(tx.data.as_ref().unwrap()) +} + +// /// Initialize the native token VP for the given address +// pub fn init_token_vp_from_tx<'a>( +// tx_env: &'a TestTxEnv, +// tx: &'a Tx, +// addr: &Address, +// ) -> (TestIbcTokenVp<'a>, TempDir) { +// let (verifiers, keys_changed) = tx_env +// .write_log +// .verifiers_and_changed_keys(&tx_env.verifiers); +// if !verifiers.contains(addr) { +// panic!( +// "The given token address {} isn't part of the tx verifiers set: \ +// {:#?}", +// addr, verifiers +// ); +// } +// let (vp_wasm_cache, vp_cache_dir) = +// wasm::compilation_cache::common::testing::cache(); + +// let ctx = Ctx::new( +// &ADDRESS, +// &tx_env.storage, +// &tx_env.write_log, +// tx, +// VpGasMeter::new(0), +// &keys_changed, +// &verifiers, +// vp_wasm_cache, +// ); +// let token = IbcToken { ctx }; + +// (TestIbcTokenVp { token }, vp_cache_dir) +// } /// Initialize the test storage. Requires initialized [`tx_host_env::ENV`]. 
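For orientation, a rough sketch of how the reworked helpers above are meant to be driven from a test. This is an assumption pieced together from the surrounding diff (the IBC message construction and the `tx: Tx` value are elided, and the helper's return type is taken to be `Result<bool, _>` based on the underlying `validate_tx`), not a verbatim excerpt:

    // Hypothetical test flow:
    tx_host_env::init();                    // set up the tx environment
    let (token, account) = init_storage();  // a token and an account with balance
    // ... build an IBC message, wrap it in `tx: Tx`, and apply it so that the
    //     write log records the IBC changes and verifiers ...
    let tx_env = tx_host_env::take();       // hand the env over to the VP side
    let accepted = validate_ibc_vp_from_tx(&tx_env, &tx)
        .expect("IBC VP validation should not error");
    assert!(accepted);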
pub fn init_storage() -> (Address, Address) { @@ -251,17 +223,18 @@ pub fn init_storage() -> (Address, Address) { // initialize a token let code = std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); - let token = tx_host_env::init_account(code.clone()); + let token = tx::ctx().init_account(code.clone()).unwrap(); // initialize an account - let account = tx_host_env::init_account(code); + let account = tx::ctx().init_account(code).unwrap(); let key = token::balance_key(&token, &account); let init_bal = Amount::from(1_000_000_000u64); - tx_host_env::write(key.to_string(), init_bal); + tx::ctx().write(&key, init_bal).unwrap(); (token, account) } -pub fn prepare_client() -> (ClientId, AnyClientState, HashMap>) { +pub fn prepare_client() +-> (ClientId, AnyClientState, HashMap>) { let mut writes = HashMap::new(); let msg = msg_create_client(); @@ -292,7 +265,7 @@ pub fn prepare_client() -> (ClientId, AnyClientState, HashMap>) { pub fn prepare_opened_connection( client_id: &ClientId, -) -> (ConnectionId, HashMap>) { +) -> (ConnectionId, HashMap>) { let mut writes = HashMap::new(); let conn_id = connection_id(0); @@ -313,7 +286,7 @@ pub fn prepare_opened_connection( pub fn prepare_opened_channel( conn_id: &ConnectionId, is_ordered: bool, -) -> (PortId, ChannelId, HashMap>) { +) -> (PortId, ChannelId, HashMap>) { let mut writes = HashMap::new(); // port diff --git a/tests/src/vm_host_env/mod.rs b/tests/src/vm_host_env/mod.rs index 93db4f6e40..3325c6eb6a 100644 --- a/tests/src/vm_host_env/mod.rs +++ b/tests/src/vm_host_env/mod.rs @@ -26,6 +26,7 @@ mod tests { use namada::ledger::ibc::vp::{ get_dummy_header as tm_dummy_header, Error as IbcError, }; + use namada::ledger::tx_env::TxEnv; use namada::proto::{SignedTxData, Tx}; use namada::tendermint_proto::Protobuf; use namada::types::key::*; @@ -33,16 +34,16 @@ mod tests { use namada::types::time::DateTimeUtc; use namada::types::token::{self, Amount}; use namada::types::{address, key}; - use namada_vm_env::tx_prelude::{ - BorshDeserialize, BorshSerialize, KeyValIterator, + use namada_tx_prelude::{ + BorshDeserialize, BorshSerialize, StorageRead, StorageWrite, }; - use namada_vm_env::vp_prelude::{PostKeyValIterator, PreKeyValIterator}; + use namada_vp_prelude::VpEnv; use prost::Message; use test_log::test; - use super::ibc; - use super::tx::*; - use super::vp::*; + use super::{ibc, tx, vp}; + use crate::tx::{tx_host_env, TestTxEnv}; + use crate::vp::{vp_host_env, TestVpEnv}; // paths to the WASMs used for tests const VP_ALWAYS_TRUE_WASM: &str = "../wasm_for_tests/vp_always_true.wasm"; @@ -53,8 +54,8 @@ mod tests { // The environment must be initialized first tx_host_env::init(); - let key = "key"; - let read_value: Option = tx_host_env::read(key); + let key = storage::Key::parse("key").unwrap(); + let read_value: Option = tx::ctx().read(&key).unwrap(); assert_eq!( None, read_value, "Trying to read a key that doesn't exists shouldn't find any value" @@ -62,9 +63,9 @@ mod tests { // Write some value let value = "test".repeat(4); - tx_host_env::write(key, value.clone()); + tx::ctx().write(&key, value.clone()).unwrap(); - let read_value: Option = tx_host_env::read(key); + let read_value: Option = tx::ctx().read(&key).unwrap(); assert_eq!( Some(value), read_value, @@ -73,8 +74,8 @@ mod tests { ); let value = vec![1_u8; 1000]; - tx_host_env::write(key, value.clone()); - let read_value: Option> = tx_host_env::read(key); + tx::ctx().write(&key, value.clone()).unwrap(); + let read_value: Option> = tx::ctx().read(&key).unwrap(); assert_eq!( Some(value), 
read_value, @@ -87,18 +88,18 @@ mod tests { // The environment must be initialized first tx_host_env::init(); - let key = "key"; + let key = storage::Key::parse("key").unwrap(); assert!( - !tx_host_env::has_key(key), + !tx::ctx().has_key(&key).unwrap(), "Before a key-value is written, its key shouldn't be found" ); // Write some value let value = "test".to_string(); - tx_host_env::write(key, value); + tx::ctx().write(&key, value).unwrap(); assert!( - tx_host_env::has_key(key), + tx::ctx().has_key(&key).unwrap(), "After a key-value has been written, its key should be found" ); } @@ -112,28 +113,28 @@ mod tests { tx_host_env::set(env); // Trying to delete a key that doesn't exists should be a no-op - let key = "key"; - tx_host_env::delete(key); + let key = storage::Key::parse("key").unwrap(); + tx::ctx().delete(&key).unwrap(); let value = "test".to_string(); - tx_host_env::write(key, value); + tx::ctx().write(&key, value).unwrap(); assert!( - tx_host_env::has_key(key), + tx::ctx().has_key(&key).unwrap(), "After a key-value has been written, its key should be found" ); // Then delete it - tx_host_env::delete(key); + tx::ctx().delete(&key).unwrap(); assert!( - !tx_host_env::has_key(key), + !tx::ctx().has_key(&key).unwrap(), "After a key has been deleted, its key shouldn't be found" ); // Trying to delete a validity predicate should fail - let key = storage::Key::validity_predicate(&test_account).to_string(); + let key = storage::Key::validity_predicate(&test_account); assert!( - panic::catch_unwind(|| { tx_host_env::delete(key) }) + panic::catch_unwind(|| { tx::ctx().delete(&key).unwrap() }) .err() .map(|a| a.downcast_ref::().cloned().unwrap()) .unwrap() @@ -146,19 +147,24 @@ mod tests { // The environment must be initialized first tx_host_env::init(); - let iter: KeyValIterator> = tx_host_env::iter_prefix("empty"); - assert_eq!( - iter.count(), - 0, + let empty_key = storage::Key::parse("empty").unwrap(); + let mut iter = + namada_tx_prelude::iter_prefix_bytes(tx::ctx(), &empty_key) + .unwrap(); + assert!( + iter.next().is_none(), "Trying to iter a prefix that doesn't have any matching keys \ should yield an empty iterator." 
); - // Write some values directly into the storage first - let prefix = Key::parse("prefix").unwrap(); + let prefix = storage::Key::parse("prefix").unwrap(); + // We'll write sub-key in some random order to check prefix iter's order + let sub_keys = [2_i32, 1, i32::MAX, -1, 260, -2, i32::MIN, 5, 0]; + + // Write the values directly into the storage first tx_host_env::with(|env| { - for i in 0..10_i32 { - let key = prefix.join(&Key::parse(i.to_string()).unwrap()); + for i in sub_keys.iter() { + let key = prefix.push(i).unwrap(); let value = i.try_to_vec().unwrap(); env.storage.write(&key, value).unwrap(); } @@ -166,10 +172,29 @@ mod tests { }); // Then try to iterate over their prefix - let iter: KeyValIterator = - tx_host_env::iter_prefix(prefix.to_string()); - let expected = (0..10).map(|i| (format!("{}/{}", prefix, i), i)); - itertools::assert_equal(iter.sorted(), expected.sorted()); + let iter = namada_tx_prelude::iter_prefix(tx::ctx(), &prefix) + .unwrap() + .map(Result::unwrap); + + // The order has to be sorted by sub-key value + let expected = sub_keys + .iter() + .sorted() + .map(|i| (prefix.push(i).unwrap(), *i)); + itertools::assert_equal(iter, expected); + + // Try to iterate over their prefix in reverse + let iter = namada_tx_prelude::rev_iter_prefix(tx::ctx(), &prefix) + .unwrap() + .map(Result::unwrap); + + // The order has to be reverse sorted by sub-key value + let expected = sub_keys + .iter() + .sorted() + .rev() + .map(|i| (prefix.push(i).unwrap(), *i)); + itertools::assert_equal(iter, expected); } #[test] @@ -182,7 +207,7 @@ mod tests { "pre-condition" ); let verifier = address::testing::established_address_1(); - tx_host_env::insert_verifier(&verifier); + tx::ctx().insert_verifier(&verifier).unwrap(); assert!( tx_host_env::with(|env| env.verifiers.contains(&verifier)), "The verifier should have been inserted" @@ -201,7 +226,7 @@ mod tests { tx_host_env::init(); let code = vec![]; - tx_host_env::init_account(code); + tx::ctx().init_account(code).unwrap(); } #[test] @@ -211,7 +236,7 @@ mod tests { let code = std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); - tx_host_env::init_account(code); + tx::ctx().init_account(code).unwrap(); } #[test] @@ -220,19 +245,19 @@ mod tests { tx_host_env::init(); assert_eq!( - tx_host_env::get_chain_id(), + tx::ctx().get_chain_id().unwrap(), tx_host_env::with(|env| env.storage.get_chain_id().0) ); assert_eq!( - tx_host_env::get_block_height(), + tx::ctx().get_block_height().unwrap(), tx_host_env::with(|env| env.storage.get_block_height().0) ); assert_eq!( - tx_host_env::get_block_hash(), + tx::ctx().get_block_hash().unwrap(), tx_host_env::with(|env| env.storage.get_block_hash().0) ); assert_eq!( - tx_host_env::get_block_epoch(), + tx::ctx().get_block_epoch().unwrap(), tx_host_env::with(|env| env.storage.get_current_epoch().0) ); } @@ -245,16 +270,16 @@ mod tests { // We can add some data to the environment let key_raw = "key"; - let key = Key::parse(key_raw).unwrap(); + let key = storage::Key::parse(key_raw).unwrap(); let value = "test".to_string(); let value_raw = value.try_to_vec().unwrap(); vp_host_env::with(|env| { env.write_log.write(&key, value_raw.clone()).unwrap() }); - let read_pre_value: Option = vp_host_env::read_pre(key_raw); + let read_pre_value: Option = vp::CTX.read_pre(&key).unwrap(); assert_eq!(None, read_pre_value); - let read_post_value: Option = vp_host_env::read_post(key_raw); + let read_post_value: Option = vp::CTX.read_post(&key).unwrap(); assert_eq!(Some(value), read_post_value); } @@ -263,12 +288,11 
@@ mod tests { let mut tx_env = TestTxEnv::default(); let addr = address::testing::established_address_1(); - let addr_key = Key::from(addr.to_db_key()); + let addr_key = storage::Key::from(addr.to_db_key()); // Write some value to storage let existing_key = addr_key.join(&Key::parse("existing_key_raw").unwrap()); - let existing_key_raw = existing_key.to_string(); let existing_value = vec![2_u8; 1000]; // Values written to storage have to be encoded with Borsh let existing_value_encoded = existing_value.try_to_vec().unwrap(); @@ -280,25 +304,24 @@ mod tests { // In a transaction, write override the existing key's value and add // another key-value let override_value = "override".to_string(); - let new_key = - addr_key.join(&Key::parse("new_key").unwrap()).to_string(); + let new_key = addr_key.join(&Key::parse("new_key").unwrap()); let new_value = "vp".repeat(4); // Initialize the VP environment via a transaction vp_host_env::init_from_tx(addr, tx_env, |_addr| { // Override the existing key - tx_host_env::write(&existing_key_raw, &override_value); + tx::ctx().write(&existing_key, &override_value).unwrap(); // Write the new key-value - tx_host_env::write(&new_key, new_value.clone()); + tx::ctx().write(&new_key, new_value.clone()).unwrap(); }); assert!( - vp_host_env::has_key_pre(&existing_key_raw), + vp::CTX.has_key_pre(&existing_key).unwrap(), "The existing key before transaction should be found" ); let pre_existing_value: Option<Vec<u8>> = - vp_host_env::read_pre(&existing_key_raw); + vp::CTX.read_pre(&existing_key).unwrap(); assert_eq!( Some(existing_value), pre_existing_value, @@ -307,10 +330,11 @@ mod tests { ); assert!( - !vp_host_env::has_key_pre(&new_key), + !vp::CTX.has_key_pre(&new_key).unwrap(), "The new key before transaction shouldn't be found" ); - let pre_new_value: Option<Vec<u8>> = vp_host_env::read_pre(&new_key); + let pre_new_value: Option<Vec<u8>> = + vp::CTX.read_pre(&new_key).unwrap(); assert_eq!( None, pre_new_value, "The new value read from state before transaction shouldn't yet \ @@ -318,11 +342,11 @@ mod tests { ); assert!( - vp_host_env::has_key_post(&existing_key_raw), + vp::CTX.has_key_post(&existing_key).unwrap(), "The existing key after transaction should still be found" ); let post_existing_value: Option<String> = - vp_host_env::read_post(&existing_key_raw); + vp::CTX.read_post(&existing_key).unwrap(); assert_eq!( Some(override_value), post_existing_value, @@ -331,10 +355,11 @@ mod tests { ); assert!( - vp_host_env::has_key_post(&new_key), + vp::CTX.has_key_post(&new_key).unwrap(), "The new key after transaction should be found" ); - let post_new_value: Option<String> = vp_host_env::read_post(&new_key); + let post_new_value: Option<String> = + vp::CTX.read_post(&new_key).unwrap(); assert_eq!( Some(new_value), post_new_value, @@ -348,12 +373,15 @@ mod tests { let mut tx_env = TestTxEnv::default(); let addr = address::testing::established_address_1(); - let addr_key = Key::from(addr.to_db_key()); + let addr_key = storage::Key::from(addr.to_db_key()); - // Write some value to storage let prefix = addr_key.join(&Key::parse("prefix").unwrap()); - for i in 0..10_i32 { - let key = prefix.join(&Key::parse(i.to_string()).unwrap()); + // We'll write sub-keys in some random order to check prefix iter's order + let sub_keys = [2_i32, 1, i32::MAX, -1, 260, -2, i32::MIN, 5, 0]; + + // Write some values to storage + for i in sub_keys.iter() { + let key = prefix.push(i).unwrap(); let value = i.try_to_vec().unwrap(); tx_env.storage.write(&key, value).unwrap(); } @@ -361,32 +389,54 @@ mod tests { // In a transaction, write 
override the existing key's value and add // another key-value - let existing_key = prefix.join(&Key::parse(5.to_string()).unwrap()); - let existing_key_raw = existing_key.to_string(); - let new_key = prefix.join(&Key::parse(11.to_string()).unwrap()); - let new_key_raw = new_key.to_string(); + let existing_key = prefix.push(&5).unwrap(); + let new_key = prefix.push(&11).unwrap(); // Initialize the VP environment via a transaction vp_host_env::init_from_tx(addr, tx_env, |_addr| { // Override one of the existing keys - tx_host_env::write(&existing_key_raw, 100_i32); + tx::ctx().write(&existing_key, 100_i32).unwrap(); // Write the new key-value under the same prefix - tx_host_env::write(&new_key_raw, 11.try_to_vec().unwrap()); + tx::ctx().write(&new_key, 11_i32).unwrap(); }); - let iter_pre: PreKeyValIterator<i32> = - vp_host_env::iter_prefix_pre(prefix.to_string()); - let expected_pre = (0..10).map(|i| (format!("{}/{}", prefix, i), i)); - itertools::assert_equal(iter_pre.sorted(), expected_pre.sorted()); - - let iter_post: PostKeyValIterator<i32> = - vp_host_env::iter_prefix_post(prefix.to_string()); - let expected_post = (0..10).map(|i| { - let val = if i == 5 { 100 } else { i }; - (format!("{}/{}", prefix, i), val) + let ctx_pre = vp::CTX.pre(); + let iter_pre = namada_vp_prelude::iter_prefix(&ctx_pre, &prefix) + .unwrap() + .map(|item| item.unwrap()); + + // The order in pre has to be sorted by sub-key value + let expected_pre = sub_keys + .iter() + .sorted() + .map(|i| (prefix.push(i).unwrap(), *i)); + itertools::assert_equal(iter_pre, expected_pre); + + let ctx_post = vp::CTX.post(); + let iter_post = namada_vp_prelude::iter_prefix(&ctx_post, &prefix) + .unwrap() + .map(|item| item.unwrap()); + + // The order in post also has to be sorted + let expected_post = sub_keys.iter().sorted().map(|i| { + let val = if *i == 5 { 100 } else { *i }; + (prefix.push(i).unwrap(), val) }); - itertools::assert_equal(iter_post.sorted(), expected_post.sorted()); + itertools::assert_equal(iter_post, expected_post); + + // Try to iterate over their prefix in reverse + let iter_pre = namada_vp_prelude::rev_iter_prefix(&ctx_pre, &prefix) + .unwrap() + .map(|item| item.unwrap()); + + // The order in pre has to be reverse sorted by sub-key value + let expected_pre = sub_keys + .iter() + .sorted() + .rev() + .map(|i| (prefix.push(i).unwrap(), *i)); + itertools::assert_equal(iter_pre, expected_pre); } #[test] @@ -421,13 +471,21 @@ mod tests { .expect("decoding signed data we just signed") }); assert_eq!(&signed_tx_data.data, data); - assert!(vp_host_env::verify_tx_signature(&pk, &signed_tx_data.sig)); + assert!( + vp::CTX + .verify_tx_signature(&pk, &signed_tx_data.sig) + .unwrap() + ); let other_keypair = key::testing::keypair_2(); - assert!(!vp_host_env::verify_tx_signature( - &other_keypair.ref_to(), - &signed_tx_data.sig - )); + assert!( + !vp::CTX + .verify_tx_signature( + &other_keypair.ref_to(), + &signed_tx_data.sig + ) + .unwrap() + ); } } @@ -437,19 +495,19 @@ mod tests { vp_host_env::init(); assert_eq!( - vp_host_env::get_chain_id(), + vp::CTX.get_chain_id().unwrap(), vp_host_env::with(|env| env.storage.get_chain_id().0) ); assert_eq!( - vp_host_env::get_block_height(), + vp::CTX.get_block_height().unwrap(), vp_host_env::with(|env| env.storage.get_block_height().0) ); assert_eq!( - vp_host_env::get_block_hash(), + vp::CTX.get_block_hash().unwrap(), vp_host_env::with(|env| env.storage.get_block_hash().0) ); assert_eq!( - vp_host_env::get_block_epoch(), + vp::CTX.get_block_epoch().unwrap(), vp_host_env::with(|env| 
env.storage.get_current_epoch().0) ); } @@ -462,14 +520,14 @@ mod tests { // evaluating without any code should fail let empty_code = vec![]; let input_data = vec![]; - let result = vp_host_env::eval(empty_code, input_data); + let result = vp::CTX.eval(empty_code, input_data).unwrap(); assert!(!result); // evaluating the VP template which always returns `true` should pass let code = std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); let input_data = vec![]; - let result = vp_host_env::eval(code, input_data); + let result = vp::CTX.eval(code, input_data).unwrap(); assert!(result); // evaluating the VP template which always returns `false` shouldn't @@ -477,7 +535,7 @@ mod tests { let code = std::fs::read(VP_ALWAYS_FALSE_WASM).expect("cannot load wasm"); let input_data = vec![]; - let result = vp_host_env::eval(code, input_data); + let result = vp::CTX.eval(code, input_data).unwrap(); assert!(!result); } @@ -503,25 +561,25 @@ mod tests { .sign(&key::testing::keypair_1()); // get and increment the connection counter let counter_key = ibc::client_counter_key(); - let counter = ibc::TestIbcActions + let counter = tx::ctx() .get_and_inc_counter(&counter_key) .expect("getting the counter failed"); let client_id = ibc::client_id(msg.client_state.client_type(), counter) .expect("invalid client ID"); // only insert a client type - let client_type_key = ibc::client_type_key(&client_id).to_string(); - tx_host_env::write( - &client_type_key, - msg.client_state.client_type().as_str().as_bytes(), - ); + let client_type_key = ibc::client_type_key(&client_id); + tx::ctx() + .write( + &client_type_key, + msg.client_state.client_type().as_str().as_bytes(), + ) + .unwrap(); // Check should fail due to no client state let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); assert!(matches!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect_err("validation succeeded unexpectedly"), + result.expect_err("validation succeeded unexpectedly"), IbcError::ClientError(_), )); // drop the transaction @@ -540,18 +598,14 @@ mod tests { .sign(&key::testing::keypair_1()); // create a client with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("creating a client failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -582,27 +636,28 @@ mod tests { let same_client_state = old_data.client_state.clone(); let height = same_client_state.latest_height(); let same_consensus_state = old_data.consensus_state; - let client_state_key = ibc::client_state_key(&client_id).to_string(); - tx_host_env::write_bytes( - &client_state_key, - same_client_state.encode_vec().unwrap(), - ); - let consensus_state_key = - ibc::consensus_state_key(&client_id, height).to_string(); - tx_host_env::write( - &consensus_state_key, - same_consensus_state.encode_vec().unwrap(), - ); + let client_state_key = ibc::client_state_key(&client_id); + tx::ctx() + .write_bytes( + &client_state_key, + same_client_state.encode_vec().unwrap(), + ) + .unwrap(); + let consensus_state_key = ibc::consensus_state_key(&client_id, height); + tx::ctx() + .write( + &consensus_state_key, + 
same_consensus_state.encode_vec().unwrap(), + ) + .unwrap(); let event = ibc::make_update_client_event(&client_id, &msg); - tx_host_env::emit_ibc_event(&event.try_into().unwrap()); + TxEnv::emit_ibc_event(tx::ctx(), &event.try_into().unwrap()).unwrap(); // Check should fail due to the invalid updating let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); assert!(matches!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect_err("validation succeeded unexpectedly"), + result.expect_err("validation succeeded unexpectedly"), IbcError::ClientError(_), )); // drop the transaction @@ -620,18 +675,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // update the client with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("updating the client failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -653,18 +704,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // upgrade the client with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("upgrading the client failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -696,25 +743,25 @@ mod tests { .sign(&key::testing::keypair_1()); // get and increment the connection counter let counter_key = ibc::connection_counter_key(); - let counter = ibc::TestIbcActions + let counter = tx::ctx() .get_and_inc_counter(&counter_key) .expect("getting the counter failed"); // insert a new opened connection let conn_id = ibc::connection_id(counter); - let conn_key = ibc::connection_key(&conn_id).to_string(); + let conn_key = ibc::connection_key(&conn_id); let mut connection = ibc::init_connection(&msg); ibc::open_connection(&mut connection); - tx_host_env::write_bytes(&conn_key, connection.encode_vec().unwrap()); + tx::ctx() + .write_bytes(&conn_key, connection.encode_vec().unwrap()) + .unwrap(); let event = ibc::make_open_init_connection_event(&conn_id, &msg); - tx_host_env::emit_ibc_event(&event.try_into().unwrap()); + TxEnv::emit_ibc_event(tx::ctx(), &event.try_into().unwrap()).unwrap(); // Check should fail due to directly opening a connection let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); assert!(matches!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect_err("validation succeeded unexpectedly"), + result.expect_err("validation succeeded unexpectedly"), IbcError::ConnectionError(_), )); // drop the transaction @@ -732,18 +779,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // init a connection with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("creating a connection failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, 
&tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -762,18 +805,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // open the connection with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("opening the connection failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -802,18 +841,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // open try a connection with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("creating a connection failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -833,18 +868,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // open the connection with the mssage - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("opening the connection failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -880,27 +911,24 @@ mod tests { // not bind a port // get and increment the channel counter let counter_key = ibc::channel_counter_key(); - let counter = ibc::TestIbcActions + let counter = tx::ctx() .get_and_inc_counter(&counter_key) .expect("getting the counter failed"); // channel let channel_id = ibc::channel_id(counter); let port_channel_id = ibc::port_channel_id(port_id, channel_id); - let channel_key = ibc::channel_key(&port_channel_id).to_string(); - tx_host_env::write_bytes( - &channel_key, - msg.channel.encode_vec().unwrap(), - ); + let channel_key = ibc::channel_key(&port_channel_id); + tx::ctx() + .write_bytes(&channel_key, msg.channel.encode_vec().unwrap()) + .unwrap(); let event = ibc::make_open_init_channel_event(&channel_id, &msg); - tx_host_env::emit_ibc_event(&event.try_into().unwrap()); + TxEnv::emit_ibc_event(tx::ctx(), &event.try_into().unwrap()).unwrap(); // Check should fail due to no port binding let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); assert!(matches!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect_err("validation succeeded unexpectedly"), + result.expect_err("validation succeeded unexpectedly"), IbcError::ChannelError(_), )); // drop the transaction @@ -922,32 +950,32 @@ mod tests { } .sign(&key::testing::keypair_1()); // bind a port - ibc::TestIbcActions + tx::ctx() .bind_port(&port_id) .expect("binding the port failed"); // get and increment the channel counter let 
counter_key = ibc::channel_counter_key(); - let counter = ibc::TestIbcActions + let counter = tx::ctx() .get_and_inc_counter(&counter_key) .expect("getting the counter failed"); // insert a opened channel let channel_id = ibc::channel_id(counter); let port_channel_id = ibc::port_channel_id(port_id, channel_id); - let channel_key = ibc::channel_key(&port_channel_id).to_string(); + let channel_key = ibc::channel_key(&port_channel_id); let mut channel = msg.channel.clone(); ibc::open_channel(&mut channel); - tx_host_env::write_bytes(&channel_key, channel.encode_vec().unwrap()); + tx::ctx() + .write_bytes(&channel_key, channel.encode_vec().unwrap()) + .unwrap(); let event = ibc::make_open_init_channel_event(&channel_id, &msg); - tx_host_env::emit_ibc_event(&event.try_into().unwrap()); + TxEnv::emit_ibc_event(tx::ctx(), &event.try_into().unwrap()).unwrap(); // Check should fail due to directly opening a channel let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); assert!(matches!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect_err("validation succeeded unexpectedly"), + result.expect_err("validation succeeded unexpectedly"), IbcError::ChannelError(_), )); // drop the transaction @@ -966,18 +994,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // init a channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("creating a channel failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -994,18 +1018,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // open the channle with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("opening the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1036,18 +1056,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // try open a channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("creating a channel failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -1065,18 +1081,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // open a channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("opening the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, 
&tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1109,18 +1121,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // close the channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("closing the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1154,18 +1162,14 @@ mod tests { .sign(&key::testing::keypair_1()); // close the channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("closing the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1202,18 +1206,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // send the token and a packet with the data - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("sending a packet failed"); // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was escrowed let escrow = address::Address::Internal( address::InternalAddress::ibc_escrow_address( @@ -1221,12 +1221,9 @@ mod tests { msg.source_channel.to_string(), ), ); - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &escrow); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let token_vp_result = + ibc::validate_token_vp_from_tx(&env, &tx, &escrow); + assert!(token_vp_result.expect("token validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -1246,18 +1243,14 @@ mod tests { } .sign(&key::testing::keypair_1()); // ack the packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("the packet ack failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1292,27 +1285,19 @@ mod tests { } .sign(&key::testing::keypair_1()); // send the token and a packet with the data - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("sending a packet failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was burned let burn = 
address::Address::Internal(address::InternalAddress::IbcBurn); - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &burn); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let result = ibc::validate_token_vp_from_tx(&env, &tx, &burn); + assert!(result.expect("token validation failed unexpectedly")); } #[test] @@ -1354,27 +1339,19 @@ mod tests { } .sign(&key::testing::keypair_1()); // receive a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("receiving a packet failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was minted let mint = address::Address::Internal(address::InternalAddress::IbcMint); - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &mint); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let result = ibc::validate_token_vp_from_tx(&env, &tx, &mint); + assert!(result.expect("token validation failed unexpectedly")); } #[test] @@ -1436,25 +1413,17 @@ mod tests { } .sign(&key::testing::keypair_1()); // receive a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("receiving a packet failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was unescrowed - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &escrow); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let result = ibc::validate_token_vp_from_tx(&env, &tx, &escrow); + assert!(result.expect("token validation failed unexpectedly")); } #[test] @@ -1491,20 +1460,16 @@ mod tests { } .sign(&key::testing::keypair_1()); // send a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("sending a packet failed"); // the transaction does something before senging a packet // Check let mut env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Commit env.commit_tx_and_block(); @@ -1524,20 +1489,16 @@ mod tests { } .sign(&key::testing::keypair_1()); // ack the packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("the packet ack failed"); // the transaction does something after the ack // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); 
} #[test] @@ -1579,20 +1540,16 @@ mod tests { } .sign(&key::testing::keypair_1()); // receive a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("receiving a packet failed"); // the transaction does something according to the packet // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); } #[test] @@ -1624,8 +1581,8 @@ mod tests { .encode(&mut tx_data) .expect("encoding failed"); // send a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("sending apacket failed"); // Commit @@ -1646,18 +1603,14 @@ mod tests { .sign(&key::testing::keypair_1()); // close the channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("closing the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was refunded let escrow = address::Address::Internal( address::InternalAddress::ibc_escrow_address( @@ -1665,12 +1618,8 @@ mod tests { packet.source_channel.to_string(), ), ); - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &escrow); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let result = ibc::validate_token_vp_from_tx(&env, &tx, &escrow); + assert!(result.expect("token validation failed unexpectedly")); } #[test] @@ -1701,8 +1650,8 @@ mod tests { .encode(&mut tx_data) .expect("encoding failed"); // send a packet with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("sending a packet failed"); // Commit @@ -1723,18 +1672,14 @@ mod tests { .sign(&key::testing::keypair_1()); // close the channel with the message - ibc::TestIbcActions - .dispatch(&tx_data) + tx::ctx() + .dispatch_ibc_action(&tx_data) .expect("closing the channel failed"); // Check let env = tx_host_env::take(); - let (ibc_vp, _) = ibc::init_ibc_vp_from_tx(&env, &tx); - assert!( - ibc_vp - .validate(tx.data.as_ref().unwrap()) - .expect("validation failed unexpectedly") - ); + let result = ibc::validate_ibc_vp_from_tx(&env, &tx); + assert!(result.expect("validation failed unexpectedly")); // Check if the token was refunded let escrow = address::Address::Internal( address::InternalAddress::ibc_escrow_address( @@ -1742,11 +1687,7 @@ mod tests { packet.source_channel.to_string(), ), ); - let (token_vp, _) = ibc::init_token_vp_from_tx(&env, &tx, &escrow); - assert!( - token_vp - .validate(tx.data.as_ref().unwrap()) - .expect("token validation failed unexpectedly") - ); + let result = ibc::validate_token_vp_from_tx(&env, &tx, &escrow); + assert!(result.expect("token validation failed unexpectedly")); } } diff --git a/tests/src/vm_host_env/tx.rs b/tests/src/vm_host_env/tx.rs index 3a684e8382..3eb674946a 100644 --- a/tests/src/vm_host_env/tx.rs +++ b/tests/src/vm_host_env/tx.rs @@ -15,16 +15,27 @@ use namada::types::{key, token}; use 
namada::vm::prefix_iter::PrefixIterators; use namada::vm::wasm::{self, TxCache, VpCache}; use namada::vm::{self, WasmCacheRwAccess}; -use namada_vm_env::tx_prelude::BorshSerialize; +use namada_tx_prelude::{BorshSerialize, Ctx}; use tempfile::TempDir; +use crate::vp::TestVpEnv; + +/// Tx execution context provides access to host env functions +static mut CTX: Ctx = unsafe { Ctx::new() }; + +/// Tx execution context provides access to host env functions +pub fn ctx() -> &'static mut Ctx { + unsafe { &mut CTX } +} + /// This module combines the native host function implementations from /// `native_tx_host_env` with the functions exposed to the tx wasm /// that will call to the native functions, instead of interfacing via a /// wasm runtime. It can be used for host environment integration tests. pub mod tx_host_env { - pub use namada_vm_env::tx_prelude::*; + pub use namada_tx_prelude::*; + pub use super::ctx; pub use super::native_tx_host_env::*; } @@ -100,14 +111,23 @@ impl TestTxEnv { ); } - /// Fake accounts existence by initializating their VP storage. - /// This is needed for accounts that are being modified by a tx test to be - /// pass account existence check in `tx_write` function. + /// Fake accounts' existence by initializing their VP storage. + /// This is needed for accounts that are being modified by a tx test to + /// pass account existence check in `tx_write` function. Only established + /// addresses ([`Address::Established`]) have their VP storage initialized, + /// as other types of accounts should not have wasm VPs in storage in any + /// case. pub fn spawn_accounts( &mut self, addresses: impl IntoIterator>, ) { for address in addresses { + if matches!( + address.borrow(), + Address::Internal(_) | Address::Implicit(_) + ) { + continue; + } let key = Key::validity_predicate(address.borrow()); let vp_code = vec![]; self.storage @@ -226,6 +246,29 @@ mod native_tx_host_env { with(|env| env.commit_tx_and_block()) } + /// Set the [`TestTxEnv`] back from a [`TestVpEnv`]. This is useful when + /// testing validation with multiple transactions that accumulate some state + /// changes. + pub fn set_from_vp_env(vp_env: TestVpEnv) { + let TestVpEnv { + storage, + write_log, + tx, + vp_wasm_cache, + vp_cache_dir, + .. + } = vp_env; + let tx_env = TestTxEnv { + storage, + write_log, + vp_wasm_cache, + vp_cache_dir, + tx, + ..Default::default() + }; + set(tx_env); + } + /// A helper macro to create implementations of the host environment /// functions exported to wasm, which uses the environment from the /// `ENV` variable. 
@@ -326,6 +369,7 @@ mod native_tx_host_env { )); native_host_fn!(tx_delete(key_ptr: u64, key_len: u64)); native_host_fn!(tx_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64); + native_host_fn!(tx_rev_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64); native_host_fn!(tx_iter_next(iter_id: u64) -> i64); native_host_fn!(tx_insert_verifier(addr_ptr: u64, addr_len: u64)); native_host_fn!(tx_update_validity_predicate( diff --git a/tests/src/vm_host_env/vp.rs b/tests/src/vm_host_env/vp.rs index 06cd6d0981..f4f6e766d6 100644 --- a/tests/src/vm_host_env/vp.rs +++ b/tests/src/vm_host_env/vp.rs @@ -10,17 +10,27 @@ use namada::types::storage::{self, Key}; use namada::vm::prefix_iter::PrefixIterators; use namada::vm::wasm::{self, VpCache}; use namada::vm::{self, WasmCacheRwAccess}; +use namada_vp_prelude::Ctx; use tempfile::TempDir; use crate::tx::{tx_host_env, TestTxEnv}; +/// VP execution context provides access to host env functions +pub static CTX: Ctx = unsafe { Ctx::new() }; + +/// VP execution context provides access to host env functions +pub fn ctx() -> &'static Ctx { + &CTX +} + /// This module combines the native host function implementations from /// `native_vp_host_env` with the functions exposed to the vp wasm /// that will call to the native functions, instead of interfacing via a /// wasm runtime. It can be used for host environment integration tests. pub mod vp_host_env { - pub use namada_vm_env::vp_prelude::*; + pub use namada_vp_prelude::*; + pub use super::ctx; pub use super::native_vp_host_env::*; } @@ -160,7 +170,7 @@ mod native_vp_host_env { /// Initialize the VP host environment in [`ENV`] by running a transaction. /// The transaction is expected to modify the storage sub-space of the given /// address `addr` or to add it to the set of verifiers using - /// [`tx_host_env::insert_verifier`]. + /// `ctx.insert_verifier`. pub fn init_from_tx( addr: Address, mut tx_env: TestTxEnv, @@ -316,6 +326,7 @@ mod native_vp_host_env { native_host_fn!(vp_has_key_pre(key_ptr: u64, key_len: u64) -> i64); native_host_fn!(vp_has_key_post(key_ptr: u64, key_len: u64) -> i64); native_host_fn!(vp_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64); + native_host_fn!(vp_rev_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64); native_host_fn!(vp_iter_pre_next(iter_id: u64) -> i64); native_host_fn!(vp_iter_post_next(iter_id: u64) -> i64); native_host_fn!(vp_get_chain_id(result_ptr: u64)); diff --git a/tx_prelude/Cargo.toml b/tx_prelude/Cargo.toml index 76419f417a..992a2146e1 100644 --- a/tx_prelude/Cargo.toml +++ b/tx_prelude/Cargo.toml @@ -4,11 +4,15 @@ edition = "2021" license = "GPL-3.0" name = "namada_tx_prelude" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = [] [dependencies] +namada = {path = "../shared"} namada_vm_env = {path = "../vm_env"} +namada_macros = {path = "../macros"} +borsh = "0.9.0" sha2 = "0.10.1" +thiserror = "1.0.30" diff --git a/tx_prelude/src/governance.rs b/tx_prelude/src/governance.rs new file mode 100644 index 0000000000..2dbab74a9e --- /dev/null +++ b/tx_prelude/src/governance.rs @@ -0,0 +1,79 @@ +//! Governance + +use namada::ledger::governance::storage; +use namada::ledger::governance::vp::ADDRESS as governance_address; +use namada::types::address::xan as m1t; +use namada::types::token::Amount; +use namada::types::transaction::governance::{ + InitProposalData, VoteProposalData, +}; + +use super::*; +use crate::token::transfer; + +/// A proposal creation transaction. 
+pub fn init_proposal(ctx: &mut Ctx, data: InitProposalData) -> TxResult { + let counter_key = storage::get_counter_key(); + let proposal_id = if let Some(id) = data.id { + id + } else { + ctx.read(&counter_key)?.unwrap() + }; + + let content_key = storage::get_content_key(proposal_id); + ctx.write_bytes(&content_key, data.content)?; + + let author_key = storage::get_author_key(proposal_id); + ctx.write(&author_key, data.author.clone())?; + + let voting_start_epoch_key = + storage::get_voting_start_epoch_key(proposal_id); + ctx.write(&voting_start_epoch_key, data.voting_start_epoch)?; + + let voting_end_epoch_key = storage::get_voting_end_epoch_key(proposal_id); + ctx.write(&voting_end_epoch_key, data.voting_end_epoch)?; + + let grace_epoch_key = storage::get_grace_epoch_key(proposal_id); + ctx.write(&grace_epoch_key, data.grace_epoch)?; + + if let Some(proposal_code) = data.proposal_code { + let proposal_code_key = storage::get_proposal_code_key(proposal_id); + ctx.write_bytes(&proposal_code_key, proposal_code)?; + } + + ctx.write(&counter_key, proposal_id + 1)?; + + let min_proposal_funds_key = storage::get_min_proposal_fund_key(); + let min_proposal_funds: Amount = + ctx.read(&min_proposal_funds_key)?.unwrap(); + + let funds_key = storage::get_funds_key(proposal_id); + ctx.write(&funds_key, min_proposal_funds)?; + + // this key must always be written for each proposal + let committing_proposals_key = + storage::get_committing_proposals_key(proposal_id, data.grace_epoch.0); + ctx.write(&committing_proposals_key, ())?; + + transfer( + ctx, + &data.author, + &governance_address, + &m1t(), + None, + min_proposal_funds, + ) +} + +/// A proposal vote transaction. +pub fn vote_proposal(ctx: &mut Ctx, data: VoteProposalData) -> TxResult { + for delegation in data.delegations { + let vote_key = storage::get_vote_proposal_key( + data.id, + data.voter.clone(), + delegation, + ); + ctx.write(&vote_key, data.vote.clone())?; + } + Ok(()) +} diff --git a/tx_prelude/src/ibc.rs b/tx_prelude/src/ibc.rs new file mode 100644 index 0000000000..cd2f0a1293 --- /dev/null +++ b/tx_prelude/src/ibc.rs @@ -0,0 +1,73 @@ +//! IBC lower-level functions for transactions. 
+ +pub use namada::ledger::ibc::handler::{Error, IbcActions, Result}; +use namada::ledger::storage_api::{StorageRead, StorageWrite}; +use namada::ledger::tx_env::TxEnv; +use namada::types::address::Address; +pub use namada::types::ibc::IbcEvent; +use namada::types::storage::{BlockHeight, Key}; +use namada::types::time::Rfc3339String; +use namada::types::token::Amount; + +use crate::token::transfer; +use crate::Ctx; + +impl IbcActions for Ctx { + type Error = crate::Error; + + fn read_ibc_data( + &self, + key: &Key, + ) -> std::result::Result>, Self::Error> { + let data = self.read_bytes(key)?; + Ok(data) + } + + fn write_ibc_data( + &mut self, + key: &Key, + data: impl AsRef<[u8]>, + ) -> std::result::Result<(), Self::Error> { + self.write_bytes(key, data)?; + Ok(()) + } + + fn delete_ibc_data( + &mut self, + key: &Key, + ) -> std::result::Result<(), Self::Error> { + self.delete(key)?; + Ok(()) + } + + fn emit_ibc_event( + &mut self, + event: IbcEvent, + ) -> std::result::Result<(), Self::Error> { + ::emit_ibc_event(self, &event)?; + Ok(()) + } + + fn transfer_token( + &mut self, + src: &Address, + dest: &Address, + token: &Address, + amount: Amount, + ) -> std::result::Result<(), Self::Error> { + transfer(self, src, dest, token, None, amount)?; + Ok(()) + } + + fn get_height(&self) -> std::result::Result { + let val = self.get_block_height()?; + Ok(val) + } + + fn get_header_time( + &self, + ) -> std::result::Result { + let val = self.get_block_time()?; + Ok(val) + } +} diff --git a/tx_prelude/src/lib.rs b/tx_prelude/src/lib.rs index 315c68384e..730adb3155 100644 --- a/tx_prelude/src/lib.rs +++ b/tx_prelude/src/lib.rs @@ -6,16 +6,319 @@ #![deny(rustdoc::broken_intra_doc_links)] #![deny(rustdoc::private_intra_doc_links)] -pub use namada_vm_env::tx_prelude::*; +pub mod governance; +pub mod ibc; +pub mod nft; +pub mod proof_of_stake; +pub mod token; -/// Log a string in a debug build. The message will be printed at the -/// `tracing::Level::Info`. Any `debug_log!` statements are only enabled in -/// non optimized builds by default. An optimized build will not execute -/// `debug_log!` statements unless `-C debug-assertions` is passed to the -/// compiler. +use core::slice; +use std::marker::PhantomData; + +pub use borsh::{BorshDeserialize, BorshSerialize}; +pub use namada::ledger::governance::storage as gov_storage; +pub use namada::ledger::parameters::storage as parameters_storage; +pub use namada::ledger::slash_fund::storage as slash_fund_storage; +pub use namada::ledger::storage::types::encode; +pub use namada::ledger::storage_api::{ + self, iter_prefix, iter_prefix_bytes, rev_iter_prefix, + rev_iter_prefix_bytes, Error, OptionExt, ResultExt, StorageRead, + StorageWrite, +}; +pub use namada::ledger::tx_env::TxEnv; +pub use namada::proto::{Signed, SignedTxData}; +pub use namada::types::address::Address; +use namada::types::chain::CHAIN_ID_LENGTH; +use namada::types::internal::HostEnvResult; +use namada::types::storage::{ + BlockHash, BlockHeight, Epoch, BLOCK_HASH_LENGTH, +}; +use namada::types::time::Rfc3339String; +pub use namada::types::*; +pub use namada_macros::transaction; +use namada_vm_env::tx::*; +use namada_vm_env::{read_from_buffer, read_key_val_bytes_from_buffer}; + +pub use crate::ibc::IbcActions; +pub use crate::proof_of_stake::{PosRead, PosWrite}; + +/// Log a string. The message will be printed at the `tracing::Level::Info`. 
+pub fn log_string<T: AsRef<str>>(msg: T) { + let msg = msg.as_ref(); + unsafe { + anoma_tx_log_string(msg.as_ptr() as _, msg.len() as _); + } +} + +/// Format and log a string in a debug build. +/// +/// In a WASM target debug build, the message will be printed at the +/// `tracing::Level::Info` when executed in the VM. An optimized build will +/// omit any `debug_log!` statements unless `-C debug-assertions` is passed to +/// the compiler. +/// +/// In a non-WASM target, the message is simply printed out to stdout. #[macro_export] macro_rules! debug_log { ($($arg:tt)*) => {{ - (if cfg!(debug_assertions) { log_string(format!($($arg)*)) }) - }} + ( + if cfg!(target_arch = "wasm32") { + if cfg!(debug_assertions) + { + log_string(format!($($arg)*)); + } + } else { + println!($($arg)*); + } + ) + }}; +} + +/// Execution context provides access to the host environment functions +pub struct Ctx(()); + +impl Ctx { + /// Create a host context. The context on the WASM side is only provided by + /// the VM once it's being executed (in here it's implicit). But + /// because we want to have an interface identical to the native + /// VPs, in which the context is explicit, in here we're just + /// using an empty `Ctx` to "fake" it. + /// + /// # Safety + /// + /// When using the `#[transaction]` macro from `namada_macros`, + /// the constructor should not be called from transaction and validity + /// predicate implementations directly - they receive `&Self` as + /// an argument provided by the macro that wraps the low-level WASM + /// interface with Rust native types. + /// + /// Otherwise, this should only be called once to initialize this "fake" + /// context in order to benefit from type-safety of the host environment + /// methods implemented on the context. + #[allow(clippy::new_without_default)] + pub const unsafe fn new() -> Self { + Self(()) + } +} + +/// Result of `TxEnv`, `storage_api::StorageRead` or `storage_api::StorageWrite` +/// method call +pub type EnvResult<T> = Result<T, Error>; + +/// Transaction result +pub type TxResult = EnvResult<()>; + +#[derive(Debug)] +pub struct KeyValIterator<T>(pub u64, pub PhantomData<T>); + +impl StorageRead<'_> for Ctx { + type PrefixIter = KeyValIterator<(String, Vec<u8>)>; + + fn read_bytes( + &self, + key: &namada::types::storage::Key, + ) -> Result<Option<Vec<u8>>, Error> { + let key = key.to_string(); + let read_result = + unsafe { anoma_tx_read(key.as_ptr() as _, key.len() as _) }; + Ok(read_from_buffer(read_result, anoma_tx_result_buffer)) + } + + fn has_key( + &self, + key: &namada::types::storage::Key, + ) -> Result<bool, Error> { + let key = key.to_string(); + let found = + unsafe { anoma_tx_has_key(key.as_ptr() as _, key.len() as _) }; + Ok(HostEnvResult::is_success(found)) + } + + fn get_chain_id(&self) -> Result<String, Error> { + let result = Vec::with_capacity(CHAIN_ID_LENGTH); + unsafe { + anoma_tx_get_chain_id(result.as_ptr() as _); + } + let slice = + unsafe { slice::from_raw_parts(result.as_ptr(), CHAIN_ID_LENGTH) }; + Ok(String::from_utf8(slice.to_vec()) + .expect("Cannot convert the ID string")) + } + + fn get_block_height( + &self, + ) -> Result<BlockHeight, Error> { + Ok(BlockHeight(unsafe { anoma_tx_get_block_height() })) + } + + fn get_block_hash( + &self, + ) -> Result<BlockHash, Error> { + let result = Vec::with_capacity(BLOCK_HASH_LENGTH); + unsafe { + anoma_tx_get_block_hash(result.as_ptr() as _); + } + let slice = unsafe { + slice::from_raw_parts(result.as_ptr(), BLOCK_HASH_LENGTH) + }; + Ok(BlockHash::try_from(slice).expect("Cannot convert the hash")) + } + + fn get_block_epoch(&self) -> Result<Epoch, Error> { + Ok(Epoch(unsafe { anoma_tx_get_block_epoch() })) + } + 
+ fn iter_prefix( + &self, + prefix: &namada::types::storage::Key, + ) -> Result { + let prefix = prefix.to_string(); + let iter_id = unsafe { + anoma_tx_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) + }; + Ok(KeyValIterator(iter_id, PhantomData)) + } + + fn rev_iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + let prefix = prefix.to_string(); + let iter_id = unsafe { + anoma_tx_rev_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) + }; + Ok(KeyValIterator(iter_id, PhantomData)) + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>, Error> { + let read_result = unsafe { anoma_tx_iter_next(iter.0) }; + Ok(read_key_val_bytes_from_buffer( + read_result, + anoma_tx_result_buffer, + )) + } +} + +impl StorageWrite for Ctx { + fn write_bytes( + &mut self, + key: &namada::types::storage::Key, + val: impl AsRef<[u8]>, + ) -> storage_api::Result<()> { + let key = key.to_string(); + unsafe { + anoma_tx_write( + key.as_ptr() as _, + key.len() as _, + val.as_ref().as_ptr() as _, + val.as_ref().len() as _, + ) + }; + Ok(()) + } + + fn delete( + &mut self, + key: &namada::types::storage::Key, + ) -> storage_api::Result<()> { + let key = key.to_string(); + unsafe { anoma_tx_delete(key.as_ptr() as _, key.len() as _) }; + Ok(()) + } +} + +impl TxEnv<'_> for Ctx { + fn get_block_time(&self) -> Result { + let read_result = unsafe { anoma_tx_get_block_time() }; + let time_value = read_from_buffer(read_result, anoma_tx_result_buffer) + .expect("The block time should exist"); + Ok(Rfc3339String( + String::try_from_slice(&time_value[..]) + .expect("The conversion shouldn't fail"), + )) + } + + fn write_temp( + &mut self, + key: &namada::types::storage::Key, + val: T, + ) -> Result<(), Error> { + let buf = val.try_to_vec().unwrap(); + self.write_bytes_temp(key, buf) + } + + fn write_bytes_temp( + &mut self, + key: &namada::types::storage::Key, + val: impl AsRef<[u8]>, + ) -> Result<(), Error> { + let key = key.to_string(); + unsafe { + anoma_tx_write_temp( + key.as_ptr() as _, + key.len() as _, + val.as_ref().as_ptr() as _, + val.as_ref().len() as _, + ) + }; + Ok(()) + } + + fn insert_verifier(&mut self, addr: &Address) -> Result<(), Error> { + let addr = addr.encode(); + unsafe { anoma_tx_insert_verifier(addr.as_ptr() as _, addr.len() as _) } + Ok(()) + } + + fn init_account( + &mut self, + code: impl AsRef<[u8]>, + ) -> Result { + let code = code.as_ref(); + let result = Vec::with_capacity(address::ESTABLISHED_ADDRESS_BYTES_LEN); + unsafe { + anoma_tx_init_account( + code.as_ptr() as _, + code.len() as _, + result.as_ptr() as _, + ) + }; + let slice = unsafe { + slice::from_raw_parts( + result.as_ptr(), + address::ESTABLISHED_ADDRESS_BYTES_LEN, + ) + }; + Ok(Address::try_from_slice(slice) + .expect("Decoding address created by the ledger shouldn't fail")) + } + + fn update_validity_predicate( + &mut self, + addr: &Address, + code: impl AsRef<[u8]>, + ) -> Result<(), Error> { + let addr = addr.encode(); + let code = code.as_ref(); + unsafe { + anoma_tx_update_validity_predicate( + addr.as_ptr() as _, + addr.len() as _, + code.as_ptr() as _, + code.len() as _, + ) + }; + Ok(()) + } + + fn emit_ibc_event(&mut self, event: &ibc::IbcEvent) -> Result<(), Error> { + let event = BorshSerialize::try_to_vec(event).unwrap(); + unsafe { + anoma_tx_emit_ibc_event(event.as_ptr() as _, event.len() as _) + }; + Ok(()) + } } diff --git a/tx_prelude/src/nft.rs b/tx_prelude/src/nft.rs new file mode 100644 index 0000000000..4ed179fe27 --- /dev/null +++ b/tx_prelude/src/nft.rs @@ 
-0,0 +1,89 @@ +use namada::types::address::Address; +use namada::types::nft; +use namada::types::nft::NftToken; +use namada::types::transaction::nft::{CreateNft, MintNft}; + +use super::*; + +/// Initialize a new NFT token address. +pub fn init_nft(ctx: &mut Ctx, nft: CreateNft) -> EnvResult<Address>
{ + let address = ctx.init_account(&nft.vp_code)?; + + // write tag + let tag_key = nft::get_tag_key(&address); + ctx.write(&tag_key, &nft.tag)?; + + // write creator + let creator_key = nft::get_creator_key(&address); + ctx.write(&creator_key, &nft.creator)?; + + // write keys + let keys_key = nft::get_keys_key(&address); + ctx.write(&keys_key, &nft.keys)?; + + // write optional keys + let optional_keys_key = nft::get_optional_keys_key(&address); + ctx.write(&optional_keys_key, nft.opt_keys)?; + + // mint tokens + aux_mint_token(ctx, &address, &nft.creator, nft.tokens, &nft.creator)?; + + ctx.insert_verifier(&nft.creator)?; + + Ok(address) +} + +pub fn mint_tokens(ctx: &mut Ctx, nft: MintNft) -> TxResult { + aux_mint_token(ctx, &nft.address, &nft.creator, nft.tokens, &nft.creator) +} + +fn aux_mint_token( + ctx: &mut Ctx, + nft_address: &Address, + creator_address: &Address, + tokens: Vec, + verifier: &Address, +) -> TxResult { + for token in tokens { + // write token metadata + let metadata_key = + nft::get_token_metadata_key(nft_address, &token.id.to_string()); + ctx.write(&metadata_key, &token.metadata)?; + + // write current owner token as creator + let current_owner_key = nft::get_token_current_owner_key( + nft_address, + &token.id.to_string(), + ); + ctx.write( + ¤t_owner_key, + &token + .current_owner + .unwrap_or_else(|| creator_address.clone()), + )?; + + // write value key + let value_key = + nft::get_token_value_key(nft_address, &token.id.to_string()); + ctx.write(&value_key, &token.values)?; + + // write optional value keys + let optional_value_key = nft::get_token_optional_value_key( + nft_address, + &token.id.to_string(), + ); + ctx.write(&optional_value_key, &token.opt_values)?; + + // write approval addresses + let approval_key = + nft::get_token_approval_key(nft_address, &token.id.to_string()); + ctx.write(&approval_key, &token.approvals)?; + + // write burnt propriety + let burnt_key = + nft::get_token_burnt_key(nft_address, &token.id.to_string()); + ctx.write(&burnt_key, token.burnt)?; + } + ctx.insert_verifier(verifier)?; + Ok(()) +} diff --git a/tx_prelude/src/proof_of_stake.rs b/tx_prelude/src/proof_of_stake.rs new file mode 100644 index 0000000000..383e7202f7 --- /dev/null +++ b/tx_prelude/src/proof_of_stake.rs @@ -0,0 +1,267 @@ +//! Proof of Stake system integration with functions for transactions + +pub use namada::ledger::pos::*; +use namada::ledger::pos::{ + bond_key, namada_proof_of_stake, params_key, total_voting_power_key, + unbond_key, validator_address_raw_hash_key, validator_consensus_key_key, + validator_eth_cold_key_key, validator_eth_hot_key_key, validator_set_key, + validator_slashes_key, validator_staking_reward_address_key, + validator_state_key, validator_total_deltas_key, + validator_voting_power_key, +}; +use namada::types::address::Address; +use namada::types::transaction::InitValidator; +use namada::types::{key, token}; +pub use namada_proof_of_stake::{ + epoched, parameters, types, PosActions as PosWrite, PosReadOnly as PosRead, +}; + +use super::*; + +impl Ctx { + /// Self-bond tokens to a validator when `source` is `None` or equal to + /// the `validator` address, or delegate tokens from the `source` to the + /// `validator`. 
+ pub fn bond_tokens( + &mut self, + source: Option<&Address>, + validator: &Address, + amount: token::Amount, + ) -> TxResult { + let current_epoch = self.get_block_epoch()?; + namada_proof_of_stake::PosActions::bond_tokens( + self, + source, + validator, + amount, + current_epoch, + ) + } + + /// Unbond self-bonded tokens from a validator when `source` is `None` or + /// equal to the `validator` address, or unbond delegated tokens from + /// the `source` to the `validator`. + pub fn unbond_tokens( + &mut self, + source: Option<&Address>, + validator: &Address, + amount: token::Amount, + ) -> TxResult { + let current_epoch = self.get_block_epoch()?; + namada_proof_of_stake::PosActions::unbond_tokens( + self, + source, + validator, + amount, + current_epoch, + ) + } + + /// Withdraw unbonded tokens from a self-bond to a validator when `source` + /// is `None` or equal to the `validator` address, or withdraw unbonded + /// tokens delegated to the `validator` to the `source`. + pub fn withdraw_tokens( + &mut self, + source: Option<&Address>, + validator: &Address, + ) -> EnvResult { + let current_epoch = self.get_block_epoch()?; + namada_proof_of_stake::PosActions::withdraw_tokens( + self, + source, + validator, + current_epoch, + ) + } + + /// Attempt to initialize a validator account. On success, returns the + /// initialized validator account's address and its staking reward address. + pub fn init_validator( + &mut self, + InitValidator { + account_key, + consensus_key, + eth_cold_key, + eth_hot_key, + rewards_account_key, + protocol_key, + dkg_key, + validator_vp_code, + rewards_vp_code, + }: InitValidator, + ) -> EnvResult<(Address, Address)> { + let current_epoch = self.get_block_epoch()?; + // Init validator account + let validator_address = self.init_account(&validator_vp_code)?; + let pk_key = key::pk_key(&validator_address); + self.write(&pk_key, &account_key)?; + let protocol_pk_key = key::protocol_pk_key(&validator_address); + self.write(&protocol_pk_key, &protocol_key)?; + let dkg_pk_key = key::dkg_session_keys::dkg_pk_key(&validator_address); + self.write(&dkg_pk_key, &dkg_key)?; + + // Init staking reward account + let rewards_address = self.init_account(&rewards_vp_code)?; + let pk_key = key::pk_key(&rewards_address); + self.write(&pk_key, &rewards_account_key)?; + + let eth_cold_key = key::common::PublicKey::Secp256k1(eth_cold_key); + let eth_hot_key = key::common::PublicKey::Secp256k1(eth_hot_key); + self.become_validator( + &validator_address, + &rewards_address, + &consensus_key, + ð_cold_key, + ð_hot_key, + current_epoch, + )?; + + Ok((validator_address, rewards_address)) + } +} + +namada::impl_pos_read_only! 
{ + type Error = crate::Error; + impl namada_proof_of_stake::PosReadOnly for Ctx +} + +impl namada_proof_of_stake::PosActions for Ctx { + type BecomeValidatorError = crate::Error; + type BondError = crate::Error; + type UnbondError = crate::Error; + type WithdrawError = crate::Error; + + fn write_pos_params( + &mut self, + params: &PosParams, + ) -> Result<(), Self::Error> { + self.write(¶ms_key(), params) + } + + fn write_validator_address_raw_hash( + &mut self, + address: &Self::Address, + consensus_key: &Self::PublicKey, + ) -> Result<(), Self::Error> { + let raw_hash = key::tm_consensus_key_raw_hash(consensus_key); + self.write(&validator_address_raw_hash_key(raw_hash), address) + } + + fn write_validator_staking_reward_address( + &mut self, + key: &Self::Address, + value: Self::Address, + ) -> Result<(), Self::Error> { + self.write(&validator_staking_reward_address_key(key), &value) + } + + fn write_validator_consensus_key( + &mut self, + key: &Self::Address, + value: ValidatorConsensusKeys, + ) -> Result<(), Self::Error> { + self.write(&validator_consensus_key_key(key), &value) + } + + fn write_validator_eth_cold_key( + &mut self, + address: &Self::Address, + value: types::ValidatorEthKey, + ) -> Result<(), Self::Error> { + let f = format!( + "address {:?} write_validator_eth_cold_key - {:#?}", + address, value + ); + log_string(f); + let encoded = encode(&value); + log_string(&format!("encoded cold key: {:#?}", encoded)); + self.write(&validator_eth_cold_key_key(address), &value) + } + + fn write_validator_eth_hot_key( + &mut self, + address: &Self::Address, + value: types::ValidatorEthKey, + ) -> Result<(), Self::Error> { + let f = format!( + "address {:?} write_validator_eth_hot_key - {:#?}", + address, value + ); + log_string(f); + let encoded = encode(&value); + log_string(&format!("encoded hot key: {:#?}", encoded)); + self.write(&validator_eth_hot_key_key(address), &value) + } + + fn write_validator_state( + &mut self, + key: &Self::Address, + value: ValidatorStates, + ) -> Result<(), Self::Error> { + self.write(&validator_state_key(key), &value) + } + + fn write_validator_total_deltas( + &mut self, + key: &Self::Address, + value: ValidatorTotalDeltas, + ) -> Result<(), Self::Error> { + self.write(&validator_total_deltas_key(key), &value) + } + + fn write_validator_voting_power( + &mut self, + key: &Self::Address, + value: ValidatorVotingPowers, + ) -> Result<(), Self::Error> { + self.write(&validator_voting_power_key(key), &value) + } + + fn write_bond( + &mut self, + key: &BondId, + value: Bonds, + ) -> Result<(), Self::Error> { + self.write(&bond_key(key), &value) + } + + fn write_unbond( + &mut self, + key: &BondId, + value: Unbonds, + ) -> Result<(), Self::Error> { + self.write(&unbond_key(key), &value) + } + + fn write_validator_set( + &mut self, + value: ValidatorSets, + ) -> Result<(), Self::Error> { + self.write(&validator_set_key(), &value) + } + + fn write_total_voting_power( + &mut self, + value: TotalVotingPowers, + ) -> Result<(), Self::Error> { + self.write(&total_voting_power_key(), &value) + } + + fn delete_bond(&mut self, key: &BondId) -> Result<(), Self::Error> { + self.delete(&bond_key(key)) + } + + fn delete_unbond(&mut self, key: &BondId) -> Result<(), Self::Error> { + self.delete(&unbond_key(key)) + } + + fn transfer( + &mut self, + token: &Self::Address, + amount: Self::TokenAmount, + src: &Self::Address, + dest: &Self::Address, + ) -> Result<(), Self::Error> { + crate::token::transfer(self, src, dest, token, None, amount) + } +} diff --git 
a/tx_prelude/src/token.rs b/tx_prelude/src/token.rs new file mode 100644 index 0000000000..90245ee01f --- /dev/null +++ b/tx_prelude/src/token.rs @@ -0,0 +1,134 @@ +use namada::types::address::{Address, InternalAddress}; +use namada::types::token; +pub use namada::types::token::*; + +use super::*; + +/// A token transfer that can be used in a transaction. +pub fn transfer( + ctx: &mut Ctx, + src: &Address, + dest: &Address, + token: &Address, + sub_prefix: Option, + amount: Amount, +) -> TxResult { + let src_key = match &sub_prefix { + Some(sub_prefix) => { + let prefix = token::multitoken_balance_prefix(token, sub_prefix); + token::multitoken_balance_key(&prefix, src) + } + None => token::balance_key(token, src), + }; + let dest_key = match &sub_prefix { + Some(sub_prefix) => { + let prefix = token::multitoken_balance_prefix(token, sub_prefix); + token::multitoken_balance_key(&prefix, dest) + } + None => token::balance_key(token, dest), + }; + let src_bal: Option = match src { + Address::Internal(InternalAddress::IbcMint) => Some(Amount::max()), + Address::Internal(InternalAddress::IbcBurn) => { + log_string("invalid transfer from the burn address"); + unreachable!() + } + _ => ctx.read(&src_key)?, + }; + let mut src_bal = src_bal.unwrap_or_else(|| { + log_string(format!("src {} has no balance", src_key)); + unreachable!() + }); + src_bal.spend(&amount); + let mut dest_bal: Amount = match dest { + Address::Internal(InternalAddress::IbcMint) => { + log_string("invalid transfer to the mint address"); + unreachable!() + } + _ => ctx.read(&dest_key)?.unwrap_or_default(), + }; + dest_bal.receive(&amount); + match src { + Address::Internal(InternalAddress::IbcMint) => { + ctx.write_temp(&src_key, src_bal)?; + } + Address::Internal(InternalAddress::IbcBurn) => unreachable!(), + _ => { + ctx.write(&src_key, src_bal)?; + } + } + match dest { + Address::Internal(InternalAddress::IbcMint) => unreachable!(), + Address::Internal(InternalAddress::IbcBurn) => { + ctx.write_temp(&dest_key, dest_bal)?; + } + _ => { + ctx.write(&dest_key, dest_bal)?; + } + } + Ok(()) +} + +/// A token transfer with storage keys that can be used in a transaction. 
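(Editor's illustration, not part of this diff.) A minimal sketch of how a transaction wasm might call the `transfer` function above, assuming the `namada_tx_prelude` crate re-exports `Ctx`, `TxResult`, `SignedTxData` and the `#[transaction]` macro, and that the payload Borsh-encodes a `token::Transfer` with `source`, `target`, `token` and `amount` fields; error handling is elided with `unwrap`. The `transfer_with_keys` variant follows below.

use namada_tx_prelude::*;

#[transaction]
fn apply_tx(ctx: &mut Ctx, tx_data: Vec<u8>) -> TxResult {
    // Decode the signed payload carried by the transaction (assumed format)
    let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap();
    let transfer = token::Transfer::try_from_slice(&signed.data.unwrap()[..]).unwrap();
    // `None` sub-prefix selects the plain (non-multitoken) balance keys
    token::transfer(
        ctx,
        &transfer.source,
        &transfer.target,
        &transfer.token,
        None,
        transfer.amount,
    )
}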
+pub fn transfer_with_keys( + ctx: &mut Ctx, + src_key: &storage::Key, + dest_key: &storage::Key, + amount: Amount, +) -> TxResult { + let src_owner = is_any_multitoken_balance_key(src_key).map(|(_, o)| o); + let src_bal: Option = match src_owner { + Some(Address::Internal(InternalAddress::IbcMint)) => { + Some(Amount::max()) + } + Some(Address::Internal(InternalAddress::IbcBurn)) => { + log_string("invalid transfer from the burn address"); + unreachable!() + } + Some(_) => ctx.read(src_key)?, + None => { + // the key is not a multitoken key + match is_any_token_balance_key(src_key) { + Some(_) => ctx.read(src_key)?, + None => { + log_string(format!("invalid balance key: {}", src_key)); + unreachable!() + } + } + } + }; + let mut src_bal = src_bal.unwrap_or_else(|| { + log_string(format!("src {} has no balance", src_key)); + unreachable!() + }); + src_bal.spend(&amount); + let dest_owner = is_any_multitoken_balance_key(dest_key).map(|(_, o)| o); + let mut dest_bal: Amount = match dest_owner { + Some(Address::Internal(InternalAddress::IbcMint)) => { + log_string("invalid transfer to the mint address"); + unreachable!() + } + Some(_) => ctx.read(dest_key)?.unwrap_or_default(), + None => match is_any_token_balance_key(dest_key) { + Some(_) => ctx.read(dest_key)?.unwrap_or_default(), + None => { + log_string(format!("invalid balance key: {}", dest_key)); + unreachable!() + } + }, + }; + dest_bal.receive(&amount); + match src_owner { + Some(Address::Internal(InternalAddress::IbcMint)) => { + ctx.write_temp(src_key, src_bal)?; + } + _ => ctx.write(src_key, src_bal)?, + } + match dest_owner { + Some(Address::Internal(InternalAddress::IbcBurn)) => { + ctx.write_temp(dest_key, dest_bal)?; + } + _ => ctx.write(dest_key, dest_bal)?, + } + Ok(()) +} diff --git a/vm_env/Cargo.toml b/vm_env/Cargo.toml index 672f013be6..f2c8854f85 100644 --- a/vm_env/Cargo.toml +++ b/vm_env/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "namada_vm_env" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = ["abciplus"] @@ -19,6 +19,4 @@ abcipp = [ [dependencies] namada = {path = "../shared", default-features = false} -namada_macros = {path = "../macros"} borsh = "0.9.0" -hex = "0.4.3" diff --git a/vm_env/src/governance.rs b/vm_env/src/governance.rs deleted file mode 100644 index cfe16e2acb..0000000000 --- a/vm_env/src/governance.rs +++ /dev/null @@ -1,82 +0,0 @@ -/// Tx imports and functions. -pub mod tx { - - use namada::ledger::governance::storage; - use namada::ledger::governance::vp::ADDRESS as governance_address; - use namada::types::address::xan as m1t; - use namada::types::token::Amount; - use namada::types::transaction::governance::{ - InitProposalData, VoteProposalData, - }; - - use crate::imports::tx; - use crate::token::tx::transfer; - - /// A proposal creation transaction. 
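(Editor's illustration, not part of this diff.) The multitoken variant above pairs with the key constructors used in `transfer`; a hypothetical helper that moves funds held under a sub-prefix, with all argument values supplied by the caller:

use namada_tx_prelude::*;

fn transfer_under_sub_prefix(
    ctx: &mut Ctx,
    token_addr: &Address,
    sub_prefix: &storage::Key,
    src: &Address,
    dest: &Address,
    amount: token::Amount,
) -> TxResult {
    // Build the two multitoken balance keys explicitly, then reuse
    // `transfer_with_keys` from above
    let prefix = token::multitoken_balance_prefix(token_addr, sub_prefix);
    let src_key = token::multitoken_balance_key(&prefix, src);
    let dest_key = token::multitoken_balance_key(&prefix, dest);
    token::transfer_with_keys(ctx, &src_key, &dest_key, amount)
}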
- pub fn init_proposal(data: InitProposalData) { - let counter_key = storage::get_counter_key(); - let proposal_id = if let Some(id) = data.id { - id - } else { - tx::read(&counter_key.to_string()).unwrap() - }; - - let content_key = storage::get_content_key(proposal_id); - tx::write_bytes(&content_key.to_string(), data.content); - - let author_key = storage::get_author_key(proposal_id); - tx::write(&author_key.to_string(), data.author.clone()); - - let voting_start_epoch_key = - storage::get_voting_start_epoch_key(proposal_id); - tx::write(&voting_start_epoch_key.to_string(), data.voting_start_epoch); - - let voting_end_epoch_key = - storage::get_voting_end_epoch_key(proposal_id); - tx::write(&voting_end_epoch_key.to_string(), data.voting_end_epoch); - - let grace_epoch_key = storage::get_grace_epoch_key(proposal_id); - tx::write(&grace_epoch_key.to_string(), data.grace_epoch); - - if let Some(proposal_code) = data.proposal_code { - let proposal_code_key = storage::get_proposal_code_key(proposal_id); - tx::write_bytes(&proposal_code_key.to_string(), proposal_code); - } - - tx::write(&counter_key.to_string(), proposal_id + 1); - - let min_proposal_funds_key = storage::get_min_proposal_fund_key(); - let min_proposal_funds: Amount = - tx::read(&min_proposal_funds_key.to_string()).unwrap(); - - let funds_key = storage::get_funds_key(proposal_id); - tx::write(&funds_key.to_string(), min_proposal_funds); - - // this key must always be written for each proposal - let committing_proposals_key = storage::get_committing_proposals_key( - proposal_id, - data.grace_epoch.0, - ); - tx::write(&committing_proposals_key.to_string(), ()); - - transfer( - &data.author, - &governance_address, - &m1t(), - None, - min_proposal_funds, - ); - } - - /// A proposal vote transaction. - pub fn vote_proposal(data: VoteProposalData) { - for delegation in data.delegations { - let vote_key = storage::get_vote_proposal_key( - data.id, - data.voter.clone(), - delegation, - ); - tx::write(&vote_key.to_string(), data.vote.clone()); - } - } -} diff --git a/vm_env/src/ibc.rs b/vm_env/src/ibc.rs deleted file mode 100644 index c7abb82ec5..0000000000 --- a/vm_env/src/ibc.rs +++ /dev/null @@ -1,50 +0,0 @@ -//! IBC functions for transactions. - -pub use namada::ledger::ibc::handler::IbcActions; -use namada::types::address::Address; -use namada::types::ibc::IbcEvent; -use namada::types::storage::{BlockHeight, Key}; -use namada::types::time::Rfc3339String; -use namada::types::token::Amount; - -use crate::imports::tx; -use crate::token::tx::transfer; - -/// This struct integrates and gives access to lower-level IBC functions. 
-pub struct Ibc; - -impl IbcActions for Ibc { - fn read_ibc_data(&self, key: &Key) -> Option> { - tx::read_bytes(key.to_string()) - } - - fn write_ibc_data(&self, key: &Key, data: impl AsRef<[u8]>) { - tx::write_bytes(key.to_string(), data) - } - - fn delete_ibc_data(&self, key: &Key) { - tx::delete(key.to_string()) - } - - fn emit_ibc_event(&self, event: IbcEvent) { - tx::emit_ibc_event(&event) - } - - fn transfer_token( - &self, - src: &Address, - dest: &Address, - token: &Address, - amount: Amount, - ) { - transfer(src, dest, token, None, amount) - } - - fn get_height(&self) -> BlockHeight { - tx::get_block_height() - } - - fn get_header_time(&self) -> Rfc3339String { - tx::get_block_time() - } -} diff --git a/vm_env/src/imports.rs b/vm_env/src/imports.rs deleted file mode 100644 index 2eabe77e54..0000000000 --- a/vm_env/src/imports.rs +++ /dev/null @@ -1,665 +0,0 @@ -use std::mem::ManuallyDrop; - -use borsh::BorshDeserialize; -use namada::types::internal::HostEnvResult; -use namada::vm::types::KeyVal; - -/// This function is a helper to handle the second step of reading var-len -/// values from the host. -/// -/// In cases where we're reading a value from the host in the guest and -/// we don't know the byte size up-front, we have to read it in 2-steps. The -/// first step reads the value into a result buffer and returns the size (if -/// any) back to the guest, the second step reads the value from cache into a -/// pre-allocated buffer with the obtained size. -fn read_from_buffer( - read_result: i64, - result_buffer: unsafe extern "C" fn(u64), -) -> Option> { - if HostEnvResult::is_fail(read_result) { - None - } else { - let result: Vec = Vec::with_capacity(read_result as _); - // The `result` will be dropped from the `target`, which is - // reconstructed from the same memory - let result = ManuallyDrop::new(result); - let offset = result.as_slice().as_ptr() as u64; - unsafe { result_buffer(offset) }; - let target = unsafe { - Vec::from_raw_parts(offset as _, read_result as _, read_result as _) - }; - Some(target) - } -} - -/// This function is a helper to handle the second step of reading var-len -/// values in a key-value pair from the host. -fn read_key_val_from_buffer( - read_result: i64, - result_buffer: unsafe extern "C" fn(u64), -) -> Option<(String, T)> { - let key_val = read_from_buffer(read_result, result_buffer) - .and_then(|t| KeyVal::try_from_slice(&t[..]).ok()); - key_val.and_then(|key_val| { - // decode the value - T::try_from_slice(&key_val.val) - .map(|val| (key_val.key, val)) - .ok() - }) -} - -/// Transaction environment imports -pub mod tx { - use core::slice; - use std::convert::TryFrom; - use std::marker::PhantomData; - - pub use borsh::{BorshDeserialize, BorshSerialize}; - use namada::types::address; - use namada::types::address::Address; - use namada::types::chain::CHAIN_ID_LENGTH; - use namada::types::ibc::IbcEvent; - use namada::types::internal::HostEnvResult; - use namada::types::storage::{ - BlockHash, BlockHeight, Epoch, BLOCK_HASH_LENGTH, - }; - use namada::types::time::Rfc3339String; - - #[derive(Debug)] - pub struct KeyValIterator(pub u64, pub PhantomData); - - /// Try to read a Borsh encoded variable-length value at the given key from - /// storage. 
- pub fn read(key: impl AsRef) -> Option { - let key = key.as_ref(); - let read_result = - unsafe { anoma_tx_read(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_tx_result_buffer) - .and_then(|t| T::try_from_slice(&t[..]).ok()) - } - - /// Try to read a variable-length value as bytes at the given key from - /// storage. - pub fn read_bytes(key: impl AsRef) -> Option> { - let key = key.as_ref(); - let read_result = - unsafe { anoma_tx_read(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_tx_result_buffer) - } - - /// Check if the given key is present in storage. - pub fn has_key(key: impl AsRef) -> bool { - let key = key.as_ref(); - let found = - unsafe { anoma_tx_has_key(key.as_ptr() as _, key.len() as _) }; - HostEnvResult::is_success(found) - } - - /// Write a value to be encoded with Borsh at the given key to storage. - pub fn write(key: impl AsRef, val: T) { - let buf = val.try_to_vec().unwrap(); - write_bytes(key, buf); - } - - /// Write a value as bytes at the given key to storage. - pub fn write_bytes(key: impl AsRef, val: impl AsRef<[u8]>) { - let key = key.as_ref(); - unsafe { - anoma_tx_write( - key.as_ptr() as _, - key.len() as _, - val.as_ref().as_ptr() as _, - val.as_ref().len() as _, - ) - }; - } - - /// Write a temporary value to be encoded with Borsh at the given key to - /// storage. - pub fn write_temp(key: impl AsRef, val: T) { - let buf = val.try_to_vec().unwrap(); - write_bytes_temp(key, buf); - } - - /// Write a temporary value as bytes at the given key to storage. - pub fn write_bytes_temp(key: impl AsRef, val: impl AsRef<[u8]>) { - let key = key.as_ref(); - unsafe { - anoma_tx_write_temp( - key.as_ptr() as _, - key.len() as _, - val.as_ref().as_ptr() as _, - val.as_ref().len() as _, - ) - }; - } - - /// Delete a value at the given key from storage. - pub fn delete(key: impl AsRef) { - let key = key.as_ref(); - unsafe { anoma_tx_delete(key.as_ptr() as _, key.len() as _) }; - } - - /// Get an iterator with the given prefix. - /// - /// Important note: The prefix iterator will ignore keys that are not yet - /// committed to storage from the block in which this transaction is being - /// applied. It will only find keys that are already committed to - /// storage (i.e. from predecessor blocks). However, it will provide the - /// most up-to-date value for such keys. - pub fn iter_prefix( - prefix: impl AsRef, - ) -> KeyValIterator { - let prefix = prefix.as_ref(); - let iter_id = unsafe { - anoma_tx_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) - }; - KeyValIterator(iter_id, PhantomData) - } - - impl Iterator for KeyValIterator { - type Item = (String, T); - - fn next(&mut self) -> Option<(String, T)> { - let read_result = unsafe { anoma_tx_iter_next(self.0) }; - super::read_key_val_from_buffer(read_result, anoma_tx_result_buffer) - } - } - - /// Insert a verifier address. This address must exist on chain, otherwise - /// the transaction will be rejected. - /// - /// Validity predicates of each verifier addresses inserted in the - /// transaction will validate the transaction and will receive all the - /// changed storage keys and initialized accounts in their inputs. 
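(Editor's illustration, not part of this diff.) Typical wasm code used these free functions directly; a small sketch with placeholder keys, assuming the `tx` module's items are in scope:

fn bump_counter_and_sum() -> u64 {
    // Read-modify-write a Borsh-encoded u64 at a placeholder key
    let key = "example/counter";
    let current: u64 = read(key).unwrap_or_default();
    write(key, current + 1);
    // Sum all committed u64 values under a placeholder prefix; keys written in
    // this block are not visited, as documented above
    iter_prefix::<u64>("example/").map(|(_key, val)| val).sum()
}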
- pub fn insert_verifier(addr: &Address) { - let addr = addr.encode(); - unsafe { anoma_tx_insert_verifier(addr.as_ptr() as _, addr.len() as _) } - } - - /// Update a validity predicate - pub fn update_validity_predicate(addr: &Address, code: impl AsRef<[u8]>) { - let addr = addr.encode(); - let code = code.as_ref(); - unsafe { - anoma_tx_update_validity_predicate( - addr.as_ptr() as _, - addr.len() as _, - code.as_ptr() as _, - code.len() as _, - ) - }; - } - - // Initialize a new account - pub fn init_account(code: impl AsRef<[u8]>) -> Address { - let code = code.as_ref(); - let result = Vec::with_capacity(address::ESTABLISHED_ADDRESS_BYTES_LEN); - unsafe { - anoma_tx_init_account( - code.as_ptr() as _, - code.len() as _, - result.as_ptr() as _, - ) - }; - let slice = unsafe { - slice::from_raw_parts( - result.as_ptr(), - address::ESTABLISHED_ADDRESS_BYTES_LEN, - ) - }; - Address::try_from_slice(slice) - .expect("Decoding address created by the ledger shouldn't fail") - } - - /// Emit an IBC event. There can be only one event per transaction. On - /// multiple calls, only the last emitted event will be used. - pub fn emit_ibc_event(event: &IbcEvent) { - let event = BorshSerialize::try_to_vec(event).unwrap(); - unsafe { - anoma_tx_emit_ibc_event(event.as_ptr() as _, event.len() as _) - }; - } - - /// Get the chain ID - pub fn get_chain_id() -> String { - let result = Vec::with_capacity(CHAIN_ID_LENGTH); - unsafe { - anoma_tx_get_chain_id(result.as_ptr() as _); - } - let slice = - unsafe { slice::from_raw_parts(result.as_ptr(), CHAIN_ID_LENGTH) }; - String::from_utf8(slice.to_vec()).expect("Cannot convert the ID string") - } - - /// Get height of the current block - pub fn get_block_height() -> BlockHeight { - BlockHeight(unsafe { anoma_tx_get_block_height() }) - } - - /// Get time of the current block header as rfc 3339 string - pub fn get_block_time() -> Rfc3339String { - let read_result = unsafe { anoma_tx_get_block_time() }; - let time_value = - super::read_from_buffer(read_result, anoma_tx_result_buffer) - .expect("The block time should exist"); - Rfc3339String( - String::try_from_slice(&time_value[..]) - .expect("The conversion shouldn't fail"), - ) - } - - /// Get hash of the current block - pub fn get_block_hash() -> BlockHash { - let result = Vec::with_capacity(BLOCK_HASH_LENGTH); - unsafe { - anoma_tx_get_block_hash(result.as_ptr() as _); - } - let slice = unsafe { - slice::from_raw_parts(result.as_ptr(), BLOCK_HASH_LENGTH) - }; - BlockHash::try_from(slice).expect("Cannot convert the hash") - } - - /// Get epoch of the current block - pub fn get_block_epoch() -> Epoch { - Epoch(unsafe { anoma_tx_get_block_epoch() }) - } - - /// Log a string. The message will be printed at the `tracing::Level::Info`. - pub fn log_string>(msg: T) { - let msg = msg.as_ref(); - unsafe { - anoma_tx_log_string(msg.as_ptr() as _, msg.len() as _); - } - } - - // These host functions are implemented in the Anoma's [`host_env`] - // module. The environment provides calls to them via this C interface. - extern "C" { - // Read variable-length data when we don't know the size up-front, - // returns the size of the value (can be 0), or -1 if the key is - // not present. If a value is found, it will be placed in the read - // cache, because we cannot allocate a buffer for it before we know - // its size. - fn anoma_tx_read(key_ptr: u64, key_len: u64) -> i64; - - // Read a value from result buffer. - fn anoma_tx_result_buffer(result_ptr: u64); - - // Returns 1 if the key is present, -1 otherwise. 
- fn anoma_tx_has_key(key_ptr: u64, key_len: u64) -> i64; - - // Write key/value - fn anoma_tx_write( - key_ptr: u64, - key_len: u64, - val_ptr: u64, - val_len: u64, - ); - - // Write a temporary key/value - fn anoma_tx_write_temp( - key_ptr: u64, - key_len: u64, - val_ptr: u64, - val_len: u64, - ); - - // Delete the given key and its value - fn anoma_tx_delete(key_ptr: u64, key_len: u64); - - // Get an ID of a data iterator with key prefix - fn anoma_tx_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64; - - // Returns the size of the value (can be 0), or -1 if there's no next - // value. If a value is found, it will be placed in the read - // cache, because we cannot allocate a buffer for it before we know - // its size. - fn anoma_tx_iter_next(iter_id: u64) -> i64; - - // Insert a verifier - fn anoma_tx_insert_verifier(addr_ptr: u64, addr_len: u64); - - // Update a validity predicate - fn anoma_tx_update_validity_predicate( - addr_ptr: u64, - addr_len: u64, - code_ptr: u64, - code_len: u64, - ); - - // Initialize a new account - fn anoma_tx_init_account(code_ptr: u64, code_len: u64, result_ptr: u64); - - // Emit an IBC event - fn anoma_tx_emit_ibc_event(event_ptr: u64, event_len: u64); - - // Get the chain ID - fn anoma_tx_get_chain_id(result_ptr: u64); - - // Get the current block height - fn anoma_tx_get_block_height() -> u64; - - // Get the time of the current block header - fn anoma_tx_get_block_time() -> i64; - - // Get the current block hash - fn anoma_tx_get_block_hash(result_ptr: u64); - - // Get the current block epoch - fn anoma_tx_get_block_epoch() -> u64; - - // Requires a node running with "Info" log level - fn anoma_tx_log_string(str_ptr: u64, str_len: u64); - } -} - -/// Validity predicate environment imports -pub mod vp { - use core::slice; - use std::convert::TryFrom; - use std::marker::PhantomData; - - pub use borsh::{BorshDeserialize, BorshSerialize}; - use namada::types::chain::CHAIN_ID_LENGTH; - use namada::types::hash::{Hash, HASH_LENGTH}; - use namada::types::internal::HostEnvResult; - use namada::types::key::*; - use namada::types::storage::{ - BlockHash, BlockHeight, Epoch, BLOCK_HASH_LENGTH, - }; - - pub struct PreKeyValIterator(pub u64, pub PhantomData); - - pub struct PostKeyValIterator(pub u64, pub PhantomData); - - /// Try to read a Borsh encoded variable-length value at the given key from - /// storage before transaction execution. - pub fn read_pre(key: impl AsRef) -> Option { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_pre(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - .and_then(|t| T::try_from_slice(&t[..]).ok()) - } - - /// Try to read a variable-length value as bytesat the given key from - /// storage before transaction execution. - pub fn read_bytes_pre(key: impl AsRef) -> Option> { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_pre(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - } - - /// Try to read a Borsh encoded variable-length value at the given key from - /// storage after transaction execution. - pub fn read_post(key: impl AsRef) -> Option { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_post(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - .and_then(|t| T::try_from_slice(&t[..]).ok()) - } - - /// Try to read a variable-length value as bytes at the given key from - /// storage after transaction execution. 
- pub fn read_bytes_post(key: impl AsRef) -> Option> { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_post(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - } - - /// Try to read a Borsh encoded variable-length value at the given key from - /// storage before transaction execution. - pub fn read_temp(key: impl AsRef) -> Option { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_temp(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - .and_then(|t| T::try_from_slice(&t[..]).ok()) - } - - /// Try to read a variable-length value as bytes at the given key from - /// storage before transaction execution. - pub fn read_bytes_temp(key: impl AsRef) -> Option> { - let key = key.as_ref(); - let read_result = - unsafe { anoma_vp_read_temp(key.as_ptr() as _, key.len() as _) }; - super::read_from_buffer(read_result, anoma_vp_result_buffer) - } - - /// Check if the given key was present in storage before transaction - /// execution. - pub fn has_key_pre(key: impl AsRef) -> bool { - let key = key.as_ref(); - let found = - unsafe { anoma_vp_has_key_pre(key.as_ptr() as _, key.len() as _) }; - HostEnvResult::is_success(found) - } - - /// Check if the given key is present in storage after transaction - /// execution. - pub fn has_key_post(key: impl AsRef) -> bool { - let key = key.as_ref(); - let found = - unsafe { anoma_vp_has_key_post(key.as_ptr() as _, key.len() as _) }; - HostEnvResult::is_success(found) - } - - /// Get an iterator with the given prefix before transaction execution - pub fn iter_prefix_pre( - prefix: impl AsRef, - ) -> PreKeyValIterator { - let prefix = prefix.as_ref(); - let iter_id = unsafe { - anoma_vp_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) - }; - PreKeyValIterator(iter_id, PhantomData) - } - - impl Iterator for PreKeyValIterator { - type Item = (String, T); - - fn next(&mut self) -> Option<(String, T)> { - let read_result = unsafe { anoma_vp_iter_pre_next(self.0) }; - super::read_key_val_from_buffer(read_result, anoma_vp_result_buffer) - } - } - - /// Get an iterator with the given prefix after transaction execution - pub fn iter_prefix_post( - prefix: impl AsRef, - ) -> PostKeyValIterator { - let prefix = prefix.as_ref(); - let iter_id = unsafe { - anoma_vp_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) - }; - PostKeyValIterator(iter_id, PhantomData) - } - - impl Iterator for PostKeyValIterator { - type Item = (String, T); - - fn next(&mut self) -> Option<(String, T)> { - let read_result = unsafe { anoma_vp_iter_post_next(self.0) }; - super::read_key_val_from_buffer(read_result, anoma_vp_result_buffer) - } - } - - /// Get the chain ID - pub fn get_chain_id() -> String { - let result = Vec::with_capacity(CHAIN_ID_LENGTH); - unsafe { - anoma_vp_get_chain_id(result.as_ptr() as _); - } - let slice = - unsafe { slice::from_raw_parts(result.as_ptr(), CHAIN_ID_LENGTH) }; - String::from_utf8(slice.to_vec()).expect("Cannot convert the ID string") - } - - /// Get height of the current block - pub fn get_block_height() -> BlockHeight { - BlockHeight(unsafe { anoma_vp_get_block_height() }) - } - - /// Get a block hash - pub fn get_block_hash() -> BlockHash { - let result = Vec::with_capacity(BLOCK_HASH_LENGTH); - unsafe { - anoma_vp_get_block_hash(result.as_ptr() as _); - } - let slice = unsafe { - slice::from_raw_parts(result.as_ptr(), BLOCK_HASH_LENGTH) - }; - BlockHash::try_from(slice).expect("Cannot convert the hash") - } - 
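(Editor's illustration, not part of this diff.) A minimal sketch of the pre/post pattern these functions support in a validity predicate, with a placeholder key:

fn counter_only_increases(key: &str) -> bool {
    // Compare the value before and after the transaction
    let pre: u64 = read_pre(key).unwrap_or_default();
    let post: u64 = read_post(key).unwrap_or_default();
    post >= pre
}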
- /// Get a tx hash - pub fn get_tx_code_hash() -> Hash { - let result = Vec::with_capacity(HASH_LENGTH); - unsafe { - anoma_vp_get_tx_code_hash(result.as_ptr() as _); - } - let slice = - unsafe { slice::from_raw_parts(result.as_ptr(), HASH_LENGTH) }; - Hash::try_from(slice).expect("Cannot convert the hash") - } - - /// Get epoch of the current block - pub fn get_block_epoch() -> Epoch { - Epoch(unsafe { anoma_vp_get_block_epoch() }) - } - - /// Verify a transaction signature. The signature is expected to have been - /// produced on the encoded transaction [`namada::proto::Tx`] - /// using [`namada::proto::Tx::sign`]. - pub fn verify_tx_signature( - pk: &common::PublicKey, - sig: &common::Signature, - ) -> bool { - let pk = BorshSerialize::try_to_vec(pk).unwrap(); - let sig = BorshSerialize::try_to_vec(sig).unwrap(); - let valid = unsafe { - anoma_vp_verify_tx_signature( - pk.as_ptr() as _, - pk.len() as _, - sig.as_ptr() as _, - sig.len() as _, - ) - }; - HostEnvResult::is_success(valid) - } - - /// Log a string. The message will be printed at the `tracing::Level::Info`. - pub fn log_string>(msg: T) { - let msg = msg.as_ref(); - unsafe { - anoma_vp_log_string(msg.as_ptr() as _, msg.len() as _); - } - } - - /// Evaluate a validity predicate with given data. The address, changed - /// storage keys and verifiers will have the same values as the input to - /// caller's validity predicate. - /// - /// If the execution fails for whatever reason, this will return `false`. - /// Otherwise returns the result of evaluation. - pub fn eval(vp_code: Vec, input_data: Vec) -> bool { - let result = unsafe { - anoma_vp_eval( - vp_code.as_ptr() as _, - vp_code.len() as _, - input_data.as_ptr() as _, - input_data.len() as _, - ) - }; - HostEnvResult::is_success(result) - } - - // These host functions are implemented in the Anoma's [`host_env`] - // module. The environment provides calls to them via this C interface. - extern "C" { - // Read variable-length prior state when we don't know the size - // up-front, returns the size of the value (can be 0), or -1 if - // the key is not present. If a value is found, it will be placed in the - // result buffer, because we cannot allocate a buffer for it before - // we know its size. - fn anoma_vp_read_pre(key_ptr: u64, key_len: u64) -> i64; - - // Read variable-length posterior state when we don't know the size - // up-front, returns the size of the value (can be 0), or -1 if - // the key is not present. If a value is found, it will be placed in the - // result buffer, because we cannot allocate a buffer for it before - // we know its size. - fn anoma_vp_read_post(key_ptr: u64, key_len: u64) -> i64; - - // Read variable-length temporary state when we don't know the size - // up-front, returns the size of the value (can be 0), or -1 if - // the key is not present. If a value is found, it will be placed in the - // result buffer, because we cannot allocate a buffer for it before - // we know its size. - fn anoma_vp_read_temp(key_ptr: u64, key_len: u64) -> i64; - - // Read a value from result buffer. - fn anoma_vp_result_buffer(result_ptr: u64); - - // Returns 1 if the key is present in prior state, -1 otherwise. - fn anoma_vp_has_key_pre(key_ptr: u64, key_len: u64) -> i64; - - // Returns 1 if the key is present in posterior state, -1 otherwise. 
- fn anoma_vp_has_key_post(key_ptr: u64, key_len: u64) -> i64; - - // Get an ID of a data iterator with key prefix - fn anoma_vp_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64; - - // Read variable-length prior state when we don't know the size - // up-front, returns the size of the value (can be 0), or -1 if - // the key is not present. If a value is found, it will be placed in the - // result buffer, because we cannot allocate a buffer for it before - // we know its size. - fn anoma_vp_iter_pre_next(iter_id: u64) -> i64; - - // Read variable-length posterior state when we don't know the size - // up-front, returns the size of the value (can be 0), or -1 if the - // key is not present. If a value is found, it will be placed in the - // result buffer, because we cannot allocate a buffer for it before - // we know its size. - fn anoma_vp_iter_post_next(iter_id: u64) -> i64; - - // Get the chain ID - fn anoma_vp_get_chain_id(result_ptr: u64); - - // Get the current block height - fn anoma_vp_get_block_height() -> u64; - - // Get the current block hash - fn anoma_vp_get_block_hash(result_ptr: u64); - - // Get the current tx hash - fn anoma_vp_get_tx_code_hash(result_ptr: u64); - - // Get the current block epoch - fn anoma_vp_get_block_epoch() -> u64; - - // Verify a transaction signature - fn anoma_vp_verify_tx_signature( - pk_ptr: u64, - pk_len: u64, - sig_ptr: u64, - sig_len: u64, - ) -> i64; - - // Requires a node running with "Info" log level - fn anoma_vp_log_string(str_ptr: u64, str_len: u64); - - fn anoma_vp_eval( - vp_code_ptr: u64, - vp_code_len: u64, - input_data_ptr: u64, - input_data_len: u64, - ) -> i64; - } -} diff --git a/vm_env/src/intent.rs b/vm_env/src/intent.rs deleted file mode 100644 index 226cb708db..0000000000 --- a/vm_env/src/intent.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::collections::HashSet; - -use namada::proto::Signed; -use namada::types::intent; -use namada::types::key::*; - -/// Tx imports and functions. -pub mod tx { - pub use namada::types::intent::*; - - use super::*; - pub fn invalidate_exchange(intent: &Signed) { - use crate::imports::tx; - let key = intent::invalid_intent_key(&intent.data.addr); - let mut invalid_intent: HashSet = - tx::read(&key.to_string()).unwrap_or_default(); - invalid_intent.insert(intent.sig.clone()); - tx::write(&key.to_string(), &invalid_intent) - } -} - -/// Vp imports and functions. -pub mod vp { - pub use namada::types::intent::*; - - use super::*; - - pub fn vp_exchange(intent: &Signed) -> bool { - use crate::imports::vp; - let key = intent::invalid_intent_key(&intent.data.addr); - - let invalid_intent_pre: HashSet = - vp::read_pre(&key.to_string()).unwrap_or_default(); - let invalid_intent_post: HashSet = - vp::read_post(&key.to_string()).unwrap_or_default(); - !invalid_intent_pre.contains(&intent.sig) - && invalid_intent_post.contains(&intent.sig) - } -} diff --git a/vm_env/src/key/ed25519.rs b/vm_env/src/key/ed25519.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/vm_env/src/key/mod.rs b/vm_env/src/key/mod.rs deleted file mode 100644 index 30aea96c46..0000000000 --- a/vm_env/src/key/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -use namada::types::address::Address; - -/// Vp imports and functions. -pub mod vp { - pub use namada::types::key::*; - - use super::*; - use crate::imports::vp; - - /// Get the public key associated with the given address. Panics if not - /// found. 
- pub fn get(owner: &Address) -> Option { - let key = pk_key(owner).to_string(); - vp::read_pre(&key) - } -} diff --git a/vm_env/src/lib.rs b/vm_env/src/lib.rs index 41ea097c35..1421dbde48 100644 --- a/vm_env/src/lib.rs +++ b/vm_env/src/lib.rs @@ -1,56 +1,241 @@ -//! This crate contains library code for wasm. Some of the code is re-exported -//! from the `shared` crate. +//! This crate contains the WASM VM low-level interface. #![doc(html_favicon_url = "https://dev.anoma.net/master/favicon.png")] #![doc(html_logo_url = "https://dev.anoma.net/master/rustdoc-logo.png")] #![deny(rustdoc::broken_intra_doc_links)] #![deny(rustdoc::private_intra_doc_links)] -pub mod governance; -pub mod ibc; -pub mod imports; -pub mod intent; -pub mod key; -pub mod nft; -pub mod proof_of_stake; -pub mod token; - -pub mod tx_prelude { - pub use namada::ledger::eth_bridge::storage::bridge_pool; - pub use namada::ledger::governance::storage; - pub use namada::ledger::parameters::storage as parameters_storage; - pub use namada::ledger::storage::types::encode; - pub use namada::ledger::treasury::storage as treasury_storage; - pub use namada::proto::{Signed, SignedTxData}; - pub use namada::types::address::Address; - pub use namada::types::storage::Key; - pub use namada::types::*; - pub use namada_macros::transaction; - - pub use crate::governance::tx as governance; - pub use crate::ibc::{Ibc, IbcActions}; - pub use crate::imports::tx::*; - pub use crate::intent::tx as intent; - pub use crate::nft::tx as nft; - pub use crate::proof_of_stake::{self, PoS, PosRead, PosWrite}; - pub use crate::token::tx as token; +use std::mem::ManuallyDrop; + +use borsh::BorshDeserialize; +use namada::types::internal::HostEnvResult; +use namada::vm::types::KeyVal; + +/// Transaction environment imports +pub mod tx { + // These host functions are implemented in the Anoma's [`host_env`] + // module. The environment provides calls to them via this C interface. + extern "C" { + // Read variable-length data when we don't know the size up-front, + // returns the size of the value (can be 0), or -1 if the key is + // not present. If a value is found, it will be placed in the read + // cache, because we cannot allocate a buffer for it before we know + // its size. + pub fn anoma_tx_read(key_ptr: u64, key_len: u64) -> i64; + + // Read a value from result buffer. + pub fn anoma_tx_result_buffer(result_ptr: u64); + + // Returns 1 if the key is present, -1 otherwise. + pub fn anoma_tx_has_key(key_ptr: u64, key_len: u64) -> i64; + + // Write key/value + pub fn anoma_tx_write( + key_ptr: u64, + key_len: u64, + val_ptr: u64, + val_len: u64, + ); + + // Write a temporary key/value + pub fn anoma_tx_write_temp( + key_ptr: u64, + key_len: u64, + val_ptr: u64, + val_len: u64, + ); + + // Delete the given key and its value + pub fn anoma_tx_delete(key_ptr: u64, key_len: u64); + + // Get an ID of a data iterator with key prefix, ordered by storage + // keys. + pub fn anoma_tx_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64; + + // Get an ID of a data iterator with key prefix, reverse ordered by + // storage keys. + pub fn anoma_tx_rev_iter_prefix( + prefix_ptr: u64, + prefix_len: u64, + ) -> u64; + + // Returns the size of the value (can be 0), or -1 if there's no next + // value. If a value is found, it will be placed in the read + // cache, because we cannot allocate a buffer for it before we know + // its size. 
+ pub fn anoma_tx_iter_next(iter_id: u64) -> i64; + + // Insert a verifier + pub fn anoma_tx_insert_verifier(addr_ptr: u64, addr_len: u64); + + // Update a validity predicate + pub fn anoma_tx_update_validity_predicate( + addr_ptr: u64, + addr_len: u64, + code_ptr: u64, + code_len: u64, + ); + + // Initialize a new account + pub fn anoma_tx_init_account( + code_ptr: u64, + code_len: u64, + result_ptr: u64, + ); + + // Emit an IBC event + pub fn anoma_tx_emit_ibc_event(event_ptr: u64, event_len: u64); + + // Get the chain ID + pub fn anoma_tx_get_chain_id(result_ptr: u64); + + // Get the current block height + pub fn anoma_tx_get_block_height() -> u64; + + // Get the time of the current block header + pub fn anoma_tx_get_block_time() -> i64; + + // Get the current block hash + pub fn anoma_tx_get_block_hash(result_ptr: u64); + + // Get the current block epoch + pub fn anoma_tx_get_block_epoch() -> u64; + + // Requires a node running with "Info" log level + pub fn anoma_tx_log_string(str_ptr: u64, str_len: u64); + } +} + +/// Validity predicate environment imports +pub mod vp { + // These host functions are implemented in the Anoma's [`host_env`] + // module. The environment provides calls to them via this C interface. + extern "C" { + // Read variable-length prior state when we don't know the size + // up-front, returns the size of the value (can be 0), or -1 if + // the key is not present. If a value is found, it will be placed in the + // result buffer, because we cannot allocate a buffer for it before + // we know its size. + pub fn anoma_vp_read_pre(key_ptr: u64, key_len: u64) -> i64; + + // Read variable-length posterior state when we don't know the size + // up-front, returns the size of the value (can be 0), or -1 if + // the key is not present. If a value is found, it will be placed in the + // result buffer, because we cannot allocate a buffer for it before + // we know its size. + pub fn anoma_vp_read_post(key_ptr: u64, key_len: u64) -> i64; + + // Read variable-length temporary state when we don't know the size + // up-front, returns the size of the value (can be 0), or -1 if + // the key is not present. If a value is found, it will be placed in the + // result buffer, because we cannot allocate a buffer for it before + // we know its size. + pub fn anoma_vp_read_temp(key_ptr: u64, key_len: u64) -> i64; + + // Read a value from result buffer. + pub fn anoma_vp_result_buffer(result_ptr: u64); + + // Returns 1 if the key is present in prior state, -1 otherwise. + pub fn anoma_vp_has_key_pre(key_ptr: u64, key_len: u64) -> i64; + + // Returns 1 if the key is present in posterior state, -1 otherwise. + pub fn anoma_vp_has_key_post(key_ptr: u64, key_len: u64) -> i64; + + // Get an ID of a data iterator with key prefix, ordered by storage + // keys. + pub fn anoma_vp_iter_prefix(prefix_ptr: u64, prefix_len: u64) -> u64; + + // Get an ID of a data iterator with key prefix, reverse ordered by + // storage keys. + pub fn anoma_vp_rev_iter_prefix( + prefix_ptr: u64, + prefix_len: u64, + ) -> u64; + + // Read variable-length prior state when we don't know the size + // up-front, returns the size of the value (can be 0), or -1 if + // the key is not present. If a value is found, it will be placed in the + // result buffer, because we cannot allocate a buffer for it before + // we know its size. 
+ pub fn anoma_vp_iter_pre_next(iter_id: u64) -> i64; + + // Read variable-length posterior state when we don't know the size + // up-front, returns the size of the value (can be 0), or -1 if the + // key is not present. If a value is found, it will be placed in the + // result buffer, because we cannot allocate a buffer for it before + // we know its size. + pub fn anoma_vp_iter_post_next(iter_id: u64) -> i64; + + // Get the chain ID + pub fn anoma_vp_get_chain_id(result_ptr: u64); + + // Get the current block height + pub fn anoma_vp_get_block_height() -> u64; + + // Get the current block hash + pub fn anoma_vp_get_block_hash(result_ptr: u64); + + // Get the current tx hash + pub fn anoma_vp_get_tx_code_hash(result_ptr: u64); + + // Get the current block epoch + pub fn anoma_vp_get_block_epoch() -> u64; + + // Verify a transaction signature + pub fn anoma_vp_verify_tx_signature( + pk_ptr: u64, + pk_len: u64, + sig_ptr: u64, + sig_len: u64, + ) -> i64; + + // Requires a node running with "Info" log level + pub fn anoma_vp_log_string(str_ptr: u64, str_len: u64); + + pub fn anoma_vp_eval( + vp_code_ptr: u64, + vp_code_len: u64, + input_data_ptr: u64, + input_data_len: u64, + ) -> i64; + } +} + +/// This function is a helper to handle the second step of reading var-len +/// values from the host. +/// +/// In cases where we're reading a value from the host in the guest and +/// we don't know the byte size up-front, we have to read it in 2-steps. The +/// first step reads the value into a result buffer and returns the size (if +/// any) back to the guest, the second step reads the value from cache into a +/// pre-allocated buffer with the obtained size. +pub fn read_from_buffer( + read_result: i64, + result_buffer: unsafe extern "C" fn(u64), +) -> Option> { + if HostEnvResult::is_fail(read_result) { + None + } else { + let result: Vec = Vec::with_capacity(read_result as _); + // The `result` will be dropped from the `target`, which is + // reconstructed from the same memory + let result = ManuallyDrop::new(result); + let offset = result.as_slice().as_ptr() as u64; + unsafe { result_buffer(offset) }; + let target = unsafe { + Vec::from_raw_parts(offset as _, read_result as _, read_result as _) + }; + Some(target) + } } -pub mod vp_prelude { - // used in the VP input - pub use std::collections::{BTreeSet, HashSet}; - - pub use namada::ledger::governance::storage as gov_storage; - pub use namada::ledger::{parameters, pos as proof_of_stake}; - pub use namada::proto::{Signed, SignedTxData}; - pub use namada::types::address::Address; - pub use namada::types::storage::Key; - pub use namada::types::*; - pub use namada_macros::validity_predicate; - - pub use crate::imports::vp::*; - pub use crate::intent::vp as intent; - pub use crate::key::vp as key; - pub use crate::nft::vp as nft; - pub use crate::token::vp as token; +/// This function is a helper to handle the second step of reading var-len +/// values in a key-value pair from the host. +pub fn read_key_val_bytes_from_buffer( + read_result: i64, + result_buffer: unsafe extern "C" fn(u64), +) -> Option<(String, Vec)> { + let key_val = read_from_buffer(read_result, result_buffer) + .and_then(|t| KeyVal::try_from_slice(&t[..]).ok()); + key_val.map(|key_val| (key_val.key, key_val.val)) } diff --git a/vm_env/src/nft.rs b/vm_env/src/nft.rs deleted file mode 100644 index 4a685acd72..0000000000 --- a/vm_env/src/nft.rs +++ /dev/null @@ -1,194 +0,0 @@ -use namada::types::nft; - -/// Tx imports and functions. 
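(Editor's illustration, not part of this diff.) The raw interface that remains in `namada_vm_env` (the extern declarations plus `read_from_buffer` above) is intended to be wrapped by the higher-level preludes; a minimal sketch of a typed read built on the two-step protocol, with the wrapper name being hypothetical:

use borsh::BorshDeserialize;
use namada_vm_env::read_from_buffer;
use namada_vm_env::tx::{anoma_tx_read, anoma_tx_result_buffer};

pub fn read_typed<T: BorshDeserialize>(key: &str) -> Option<T> {
    // Step 1: ask the host for the value; if present, its size is returned and
    // the bytes are kept in the host-side result buffer
    let read_result = unsafe { anoma_tx_read(key.as_ptr() as _, key.len() as _) };
    // Step 2: copy the buffered bytes into guest memory and Borsh-decode them
    read_from_buffer(read_result, anoma_tx_result_buffer)
        .and_then(|bytes| T::try_from_slice(&bytes[..]).ok())
}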
-pub mod tx { - use namada::types::address::Address; - use namada::types::nft::NftToken; - use namada::types::transaction::nft::{CreateNft, MintNft}; - - use super::*; - use crate::imports::tx; - pub fn init_nft(nft: CreateNft) -> Address { - let address = tx::init_account(&nft.vp_code); - - // write tag - let tag_key = nft::get_tag_key(&address); - tx::write(&tag_key.to_string(), &nft.tag); - - // write creator - let creator_key = nft::get_creator_key(&address); - tx::write(&creator_key.to_string(), &nft.creator); - - // write keys - let keys_key = nft::get_keys_key(&address); - tx::write(&keys_key.to_string(), &nft.keys); - - // write optional keys - let optional_keys_key = nft::get_optional_keys_key(&address); - tx::write(&optional_keys_key.to_string(), nft.opt_keys); - - // mint tokens - aux_mint_token(&address, &nft.creator, nft.tokens, &nft.creator); - - tx::insert_verifier(&nft.creator); - - address - } - - pub fn mint_tokens(nft: MintNft) { - aux_mint_token(&nft.address, &nft.creator, nft.tokens, &nft.creator); - } - - fn aux_mint_token( - nft_address: &Address, - creator_address: &Address, - tokens: Vec, - verifier: &Address, - ) { - for token in tokens { - // write token metadata - let metadata_key = - nft::get_token_metadata_key(nft_address, &token.id.to_string()); - tx::write(&metadata_key.to_string(), &token.metadata); - - // write current owner token as creator - let current_owner_key = nft::get_token_current_owner_key( - nft_address, - &token.id.to_string(), - ); - tx::write( - ¤t_owner_key.to_string(), - &token - .current_owner - .unwrap_or_else(|| creator_address.clone()), - ); - - // write value key - let value_key = - nft::get_token_value_key(nft_address, &token.id.to_string()); - tx::write(&value_key.to_string(), &token.values); - - // write optional value keys - let optional_value_key = nft::get_token_optional_value_key( - nft_address, - &token.id.to_string(), - ); - tx::write(&optional_value_key.to_string(), &token.opt_values); - - // write approval addresses - let approval_key = - nft::get_token_approval_key(nft_address, &token.id.to_string()); - tx::write(&approval_key.to_string(), &token.approvals); - - // write burnt propriety - let burnt_key = - nft::get_token_burnt_key(nft_address, &token.id.to_string()); - tx::write(&burnt_key.to_string(), token.burnt); - } - tx::insert_verifier(verifier); - } -} - -/// A Nft validity predicate -pub mod vp { - use std::collections::BTreeSet; - - use namada::types::address::Address; - pub use namada::types::nft::*; - use namada::types::storage::Key; - - use crate::imports::vp; - - enum KeyType { - Metadata(Address, String), - Approval(Address, String), - CurrentOwner(Address, String), - Creator(Address), - PastOwners(Address, String), - Unknown, - } - - pub fn vp( - _tx_da_ta: Vec, - nft_address: &Address, - keys_changed: &BTreeSet, - verifiers: &BTreeSet
, - ) -> bool { - keys_changed - .iter() - .all(|key| match get_key_type(key, nft_address) { - KeyType::Creator(_creator_addr) => { - vp::log_string("creator cannot be changed."); - false - } - KeyType::Approval(nft_address, token_id) => { - vp::log_string(format!( - "nft vp, checking approvals with token id: {}", - token_id - )); - - is_creator(&nft_address, verifiers) - || is_approved( - &nft_address, - token_id.as_ref(), - verifiers, - ) - } - KeyType::Metadata(nft_address, token_id) => { - vp::log_string(format!( - "nft vp, checking if metadata changed: {}", - token_id - )); - is_creator(&nft_address, verifiers) - } - _ => is_creator(nft_address, verifiers), - }) - } - - fn is_approved( - nft_address: &Address, - nft_token_id: &str, - verifiers: &BTreeSet
, - ) -> bool { - let approvals_key = - get_token_approval_key(nft_address, nft_token_id).to_string(); - let approval_addresses: Vec<Address>
= - vp::read_pre(approvals_key).unwrap_or_default(); - return approval_addresses - .iter() - .any(|addr| verifiers.contains(addr)); - } - - fn is_creator( - nft_address: &Address, - verifiers: &BTreeSet<Address>
, - ) -> bool { - let creator_key = get_creator_key(nft_address).to_string(); - let creator_address: Address = vp::read_pre(creator_key).unwrap(); - verifiers.contains(&creator_address) - } - - fn get_key_type(key: &Key, nft_address: &Address) -> KeyType { - let is_creator_key = is_nft_creator_key(key, nft_address); - let is_metadata_key = is_nft_metadata_key(key, nft_address); - let is_approval_key = is_nft_approval_key(key, nft_address); - let is_current_owner_key = is_nft_current_owner_key(key, nft_address); - let is_past_owner_key = is_nft_past_owners_key(key, nft_address); - if let Some(nft_address) = is_creator_key { - return KeyType::Creator(nft_address); - } - if let Some((nft_address, token_id)) = is_metadata_key { - return KeyType::Metadata(nft_address, token_id); - } - if let Some((nft_address, token_id)) = is_approval_key { - return KeyType::Approval(nft_address, token_id); - } - if let Some((nft_address, token_id)) = is_current_owner_key { - return KeyType::CurrentOwner(nft_address, token_id); - } - if let Some((nft_address, token_id)) = is_past_owner_key { - return KeyType::PastOwners(nft_address, token_id); - } - KeyType::Unknown - } -} diff --git a/vm_env/src/proof_of_stake.rs b/vm_env/src/proof_of_stake.rs deleted file mode 100644 index 295cf6e692..0000000000 --- a/vm_env/src/proof_of_stake.rs +++ /dev/null @@ -1,297 +0,0 @@ -//! Proof of Stake system integration with functions for transactions - -use namada::ledger::pos::namada_proof_of_stake::{ - BecomeValidatorError, BondError, UnbondError, WithdrawError, -}; -use namada::ledger::pos::types::Slash; -pub use namada::ledger::pos::*; -use namada::ledger::pos::{ - bond_key, namada_proof_of_stake, params_key, total_voting_power_key, - unbond_key, validator_address_raw_hash_key, validator_consensus_key_key, - validator_set_key, validator_slashes_key, - validator_staking_reward_address_key, validator_state_key, - validator_total_deltas_key, validator_voting_power_key, -}; -use namada::types::address::{self, Address, InternalAddress}; -use namada::types::transaction::InitValidator; -use namada::types::{key, token}; -pub use namada_proof_of_stake::{ - epoched, parameters, types, PosActions as PosWrite, PosReadOnly as PosRead, -}; - -use crate::imports::tx; - -/// Self-bond tokens to a validator when `source` is `None` or equal to -/// the `validator` address, or delegate tokens from the `source` to the -/// `validator`. -pub fn bond_tokens( - source: Option<&Address>, - validator: &Address, - amount: token::Amount, -) -> Result<(), BondError
> { - let current_epoch = tx::get_block_epoch(); - PoS.bond_tokens(source, validator, amount, current_epoch) -} - -/// Unbond self-bonded tokens from a validator when `source` is `None` or -/// equal to the `validator` address, or unbond delegated tokens from -/// the `source` to the `validator`. -pub fn unbond_tokens( - source: Option<&Address>, - validator: &Address, - amount: token::Amount, -) -> Result<(), UnbondError> { - let current_epoch = tx::get_block_epoch(); - PoS.unbond_tokens(source, validator, amount, current_epoch) -} - -/// Withdraw unbonded tokens from a self-bond to a validator when `source` -/// is `None` or equal to the `validator` address, or withdraw unbonded -/// tokens delegated to the `validator` to the `source`. -pub fn withdraw_tokens( - source: Option<&Address>, - validator: &Address, -) -> Result> { - let current_epoch = tx::get_block_epoch(); - PoS.withdraw_tokens(source, validator, current_epoch) -} - -/// Attempt to initialize a validator account. On success, returns the -/// initialized validator account's address and its staking reward address. -pub fn init_validator( - InitValidator { - account_key, - consensus_key, - eth_cold_key, - eth_hot_key, - rewards_account_key, - protocol_key, - dkg_key, - validator_vp_code, - rewards_vp_code, - }: InitValidator, -) -> Result<(Address, Address), BecomeValidatorError
> { - let current_epoch = tx::get_block_epoch(); - // Init validator account - let validator_address = tx::init_account(&validator_vp_code); - let pk_key = key::pk_key(&validator_address); - tx::write(&pk_key.to_string(), &account_key); - let protocol_pk_key = key::protocol_pk_key(&validator_address); - tx::write(&protocol_pk_key.to_string(), &protocol_key); - let dkg_pk_key = key::dkg_session_keys::dkg_pk_key(&validator_address); - tx::write(&dkg_pk_key.to_string(), &dkg_key); - - // Init staking reward account - let rewards_address = tx::init_account(&rewards_vp_code); - let pk_key = key::pk_key(&rewards_address); - tx::write(&pk_key.to_string(), &rewards_account_key); - - let eth_cold_key = key::common::PublicKey::Secp256k1(eth_cold_key); - let eth_hot_key = key::common::PublicKey::Secp256k1(eth_hot_key); - PoS.become_validator( - &validator_address, - &rewards_address, - &consensus_key, - ð_cold_key, - ð_hot_key, - current_epoch, - )?; - Ok((validator_address, rewards_address)) -} - -/// Proof of Stake system. This struct integrates and gives access to -/// lower-level PoS functions. -pub struct PoS; - -impl namada_proof_of_stake::PosReadOnly for PoS { - type Address = Address; - type PublicKey = key::common::PublicKey; - type TokenAmount = token::Amount; - type TokenChange = token::Change; - - const POS_ADDRESS: Self::Address = Address::Internal(InternalAddress::PoS); - - fn staking_token_address() -> Self::Address { - address::xan() - } - - fn read_pos_params(&self) -> PosParams { - tx::read(params_key().to_string()).unwrap() - } - - fn read_validator_staking_reward_address( - &self, - key: &Self::Address, - ) -> Option { - tx::read(validator_staking_reward_address_key(key).to_string()) - } - - fn read_validator_consensus_key( - &self, - key: &Self::Address, - ) -> Option { - tx::read(validator_consensus_key_key(key).to_string()) - } - - fn read_validator_state( - &self, - key: &Self::Address, - ) -> Option { - tx::read(validator_state_key(key).to_string()) - } - - fn read_validator_total_deltas( - &self, - key: &Self::Address, - ) -> Option { - tx::read(validator_total_deltas_key(key).to_string()) - } - - fn read_validator_voting_power( - &self, - key: &Self::Address, - ) -> Option { - tx::read(validator_voting_power_key(key).to_string()) - } - - fn read_validator_slashes(&self, key: &Self::Address) -> Vec { - tx::read(validator_slashes_key(key).to_string()).unwrap_or_default() - } - - fn read_bond(&self, key: &BondId) -> Option { - tx::read(bond_key(key).to_string()) - } - - fn read_unbond(&self, key: &BondId) -> Option { - tx::read(unbond_key(key).to_string()) - } - - fn read_validator_set(&self) -> ValidatorSets { - tx::read(validator_set_key().to_string()).unwrap() - } - - fn read_total_voting_power(&self) -> TotalVotingPowers { - tx::read(total_voting_power_key().to_string()).unwrap() - } - - fn read_validator_eth_cold_key( - &self, - key: &Self::Address, - ) -> Option> { - tx::read(validator_eth_cold_key_key(key).to_string()) - } - - fn read_validator_eth_hot_key( - &self, - key: &Self::Address, - ) -> Option> { - tx::read(validator_eth_hot_key_key(key).to_string()) - } -} - -impl namada_proof_of_stake::PosActions for PoS { - fn write_pos_params(&mut self, params: &PosParams) { - tx::write(params_key().to_string(), params) - } - - fn write_validator_address_raw_hash(&mut self, address: &Self::Address) { - let raw_hash = address.raw_hash().unwrap().to_owned(); - tx::write( - validator_address_raw_hash_key(raw_hash).to_string(), - address, - ) - } - - fn 
write_validator_staking_reward_address( - &mut self, - key: &Self::Address, - value: Self::Address, - ) { - tx::write( - validator_staking_reward_address_key(key).to_string(), - &value, - ) - } - - fn write_validator_consensus_key( - &mut self, - key: &Self::Address, - value: ValidatorConsensusKeys, - ) { - tx::write(validator_consensus_key_key(key).to_string(), &value) - } - - fn write_validator_state( - &mut self, - key: &Self::Address, - value: ValidatorStates, - ) { - tx::write(validator_state_key(key).to_string(), &value) - } - - fn write_validator_total_deltas( - &mut self, - key: &Self::Address, - value: ValidatorTotalDeltas, - ) { - tx::write(validator_total_deltas_key(key).to_string(), &value) - } - - fn write_validator_voting_power( - &mut self, - key: &Self::Address, - value: ValidatorVotingPowers, - ) { - tx::write(validator_voting_power_key(key).to_string(), &value) - } - - fn write_bond(&mut self, key: &BondId, value: Bonds) { - tx::write(bond_key(key).to_string(), &value) - } - - fn write_unbond(&mut self, key: &BondId, value: Unbonds) { - tx::write(unbond_key(key).to_string(), &value) - } - - fn write_validator_set(&mut self, value: ValidatorSets) { - tx::write(validator_set_key().to_string(), &value) - } - - fn write_total_voting_power(&mut self, value: TotalVotingPowers) { - tx::write(total_voting_power_key().to_string(), &value) - } - - fn delete_bond(&mut self, key: &BondId) { - tx::delete(bond_key(key).to_string()) - } - - fn delete_unbond(&mut self, key: &BondId) { - tx::delete(unbond_key(key).to_string()) - } - - fn transfer( - &mut self, - token: &Self::Address, - amount: Self::TokenAmount, - src: &Self::Address, - dest: &Self::Address, - ) { - crate::token::tx::transfer(src, dest, token, None, amount) - } - - fn write_validator_eth_cold_key( - &mut self, - address: &Self::Address, - value: types::ValidatorEthKey, - ) { - tx::write(validator_eth_cold_key_key(address).to_string(), &value) - } - - fn write_validator_eth_hot_key( - &self, - address: &Self::Address, - value: types::ValidatorEthKey, - ) { - tx::write(validator_eth_hot_key_key(address).to_string(), &value) - } -} diff --git a/vm_env/src/token.rs b/vm_env/src/token.rs deleted file mode 100644 index 5ba2d7e33f..0000000000 --- a/vm_env/src/token.rs +++ /dev/null @@ -1,179 +0,0 @@ -use std::collections::BTreeSet; - -use namada::types::address::{Address, InternalAddress}; -use namada::types::storage::Key; -use namada::types::token; - -/// Vp imports and functions. -pub mod vp { - use namada::types::storage::KeySeg; - pub use namada::types::token::*; - - use super::*; - use crate::imports::vp; - - /// A token validity predicate. - pub fn vp( - token: &Address, - keys_changed: &BTreeSet, - verifiers: &BTreeSet
, - ) -> bool { - let mut change: Change = 0; - let all_checked = keys_changed.iter().all(|key| { - let owner: Option<&Address> = - match token::is_multitoken_balance_key(token, key) { - Some((_, o)) => Some(o), - None => token::is_balance_key(token, key), - }; - match owner { - None => { - // Unknown changes to this address space are disallowed, but - // unknown changes anywhere else are permitted - key.segments.get(0) != Some(&token.to_db_key()) - } - Some(owner) => { - // accumulate the change - let key = key.to_string(); - let pre: Amount = match owner { - Address::Internal(InternalAddress::IbcMint) => { - Amount::max() - } - Address::Internal(InternalAddress::IbcBurn) => { - Amount::default() - } - _ => vp::read_pre(&key).unwrap_or_default(), - }; - let post: Amount = match owner { - Address::Internal(InternalAddress::IbcMint) => { - vp::read_temp(&key).unwrap_or_else(Amount::max) - } - Address::Internal(InternalAddress::IbcBurn) => { - vp::read_temp(&key).unwrap_or_default() - } - _ => vp::read_post(&key).unwrap_or_default(), - }; - let this_change = post.change() - pre.change(); - change += this_change; - // make sure that the spender approved the transaction - if this_change < 0 { - return verifiers.contains(owner); - } - true - } - } - }); - all_checked && change == 0 - } -} - -/// Tx imports and functions. -pub mod tx { - pub use namada::types::token::*; - - use super::*; - use crate::imports::tx; - - /// A token transfer that can be used in a transaction. - pub fn transfer( - src: &Address, - dest: &Address, - token: &Address, - sub_prefix: Option, - amount: Amount, - ) { - let (src_key, dest_key) = match &sub_prefix { - Some(sub_prefix) => { - let prefix = - token::multitoken_balance_prefix(token, sub_prefix); - ( - token::multitoken_balance_key(&prefix, src), - token::multitoken_balance_key(&prefix, dest), - ) - } - None => ( - token::balance_key(token, src), - token::balance_key(token, dest), - ), - }; - let src_bal: Option = tx::read(&src_key.to_string()); - match src_bal { - None => { - tx::log_string(format!("src {} has no balance", src)); - unreachable!() - } - Some(mut src_bal) => { - src_bal.spend(&amount); - let mut dest_bal: Amount = - tx::read(&dest_key.to_string()).unwrap_or_default(); - dest_bal.receive(&amount); - tx::write(&src_key.to_string(), src_bal); - tx::write(&dest_key.to_string(), dest_bal); - } - } - } - - /// A token transfer with storage keys that can be used in a transaction. 
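(Editor's illustration, not part of this diff.) The accounting rule enforced by the token validity predicate above, restated as a self-contained check with placeholder owner names:

use std::collections::{BTreeSet, HashMap};

fn token_changes_ok(
    changes: &HashMap<String, i128>,
    verifiers: &BTreeSet<String>,
) -> bool {
    // Per-owner balance changes must sum to zero, and every owner whose
    // balance decreased must be among the verifiers, so that its own VP
    // also ran and approved the transaction
    let net: i128 = changes.values().sum();
    net == 0
        && changes
            .iter()
            .all(|(owner, delta)| *delta >= 0 || verifiers.contains(owner))
}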
- pub fn transfer_with_keys(src_key: &Key, dest_key: &Key, amount: Amount) { - let src_owner = is_any_multitoken_balance_key(src_key).map(|(_, o)| o); - let src_bal: Option = match src_owner { - Some(Address::Internal(InternalAddress::IbcMint)) => { - Some(Amount::max()) - } - Some(Address::Internal(InternalAddress::IbcBurn)) => { - tx::log_string("invalid transfer from the burn address"); - unreachable!() - } - Some(_) => tx::read(&src_key.to_string()), - None => { - // the key is not a multitoken key - match is_any_token_balance_key(src_key) { - Some(_) => tx::read(src_key.to_string()), - None => { - tx::log_string(format!( - "invalid balance key: {}", - src_key - )); - unreachable!() - } - } - } - }; - let mut src_bal = src_bal.unwrap_or_else(|| { - tx::log_string(format!("src {} has no balance", src_key)); - unreachable!() - }); - src_bal.spend(&amount); - let dest_owner = - is_any_multitoken_balance_key(dest_key).map(|(_, o)| o); - let mut dest_bal: Amount = match dest_owner { - Some(Address::Internal(InternalAddress::IbcMint)) => { - tx::log_string("invalid transfer to the mint address"); - unreachable!() - } - Some(_) => tx::read(dest_key.to_string()).unwrap_or_default(), - None => match is_any_token_balance_key(dest_key) { - Some(_) => tx::read(dest_key.to_string()).unwrap_or_default(), - None => { - tx::log_string(format!( - "invalid balance key: {}", - dest_key - )); - unreachable!() - } - }, - }; - dest_bal.receive(&amount); - match src_owner { - Some(Address::Internal(InternalAddress::IbcMint)) => { - tx::write_temp(&src_key.to_string(), src_bal) - } - _ => tx::write(&src_key.to_string(), src_bal), - } - match dest_owner { - Some(Address::Internal(InternalAddress::IbcBurn)) => { - tx::write_temp(&dest_key.to_string(), dest_bal) - } - _ => tx::write(&dest_key.to_string(), dest_bal), - } - } -} diff --git a/vp_prelude/Cargo.toml b/vp_prelude/Cargo.toml index d915826e49..f270a17d9f 100644 --- a/vp_prelude/Cargo.toml +++ b/vp_prelude/Cargo.toml @@ -4,11 +4,15 @@ edition = "2021" license = "GPL-3.0" name = "namada_vp_prelude" resolver = "2" -version = "0.7.1" +version = "0.8.1" [features] default = [] [dependencies] +namada = {path = "../shared"} namada_vm_env = {path = "../vm_env"} +namada_macros = {path = "../macros"} +borsh = "0.9.0" sha2 = "0.10.1" +thiserror = "1.0.30" diff --git a/vp_prelude/src/key.rs b/vp_prelude/src/key.rs new file mode 100644 index 0000000000..5ef2a5e28c --- /dev/null +++ b/vp_prelude/src/key.rs @@ -0,0 +1,13 @@ +//! Cryptographic signature keys + +use namada::types::address::Address; +pub use namada::types::key::*; + +use super::*; + +/// Get the public key associated with the given address. Panics if not +/// found. 
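// Illustrative sketch, not part of this patch: a VP can combine `get` below with
// `Ctx::verify_tx_signature` from the prelude to check that a tx was signed by the
// owner's registered key. The helper name `validate_sig` is hypothetical, and this
// assumes `get` returns an `Option` of the owner's `common::PublicKey`:
//
//     fn validate_sig(ctx: &Ctx, owner: &Address, tx_data: &[u8]) -> VpResult {
//         match (SignedTxData::try_from_slice(tx_data).ok(), get(ctx, owner)?) {
//             (Some(signed), Some(pk)) => ctx.verify_tx_signature(&pk, &signed.sig),
//             _ => reject(),
//         }
//     }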
+pub fn get(ctx: &Ctx, owner: &Address) -> EnvResult> { + let key = pk_key(owner); + ctx.read_pre(&key) +} diff --git a/vp_prelude/src/lib.rs b/vp_prelude/src/lib.rs index 957354d848..e6618bc5de 100644 --- a/vp_prelude/src/lib.rs +++ b/vp_prelude/src/lib.rs @@ -6,10 +6,38 @@ #![deny(rustdoc::broken_intra_doc_links)] #![deny(rustdoc::private_intra_doc_links)] +pub mod key; +pub mod nft; +pub mod token; + +// used in the VP input use core::convert::AsRef; +use core::slice; +pub use std::collections::{BTreeSet, HashSet}; +use std::convert::TryFrom; +use std::marker::PhantomData; -use namada_vm_env::vp_prelude::hash::Hash; -pub use namada_vm_env::vp_prelude::*; +pub use borsh::{BorshDeserialize, BorshSerialize}; +pub use namada::ledger::governance::storage as gov_storage; +pub use namada::ledger::storage_api::{ + self, iter_prefix, iter_prefix_bytes, rev_iter_prefix, + rev_iter_prefix_bytes, Error, OptionExt, ResultExt, StorageRead, +}; +pub use namada::ledger::vp_env::VpEnv; +pub use namada::ledger::{parameters, pos as proof_of_stake}; +pub use namada::proto::{Signed, SignedTxData}; +pub use namada::types::address::Address; +use namada::types::chain::CHAIN_ID_LENGTH; +use namada::types::hash::{Hash, HASH_LENGTH}; +use namada::types::internal::HostEnvResult; +use namada::types::key::*; +use namada::types::storage::{ + BlockHash, BlockHeight, Epoch, BLOCK_HASH_LENGTH, +}; +pub use namada::types::*; +pub use namada_macros::validity_predicate; +use namada_vm_env::vp::*; +use namada_vm_env::{read_from_buffer, read_key_val_bytes_from_buffer}; pub use sha2::{Digest, Sha256, Sha384, Sha512}; pub fn sha256(bytes: &[u8]) -> Hash { @@ -17,30 +45,432 @@ pub fn sha256(bytes: &[u8]) -> Hash { Hash(*digest.as_ref()) } -pub fn is_tx_whitelisted() -> bool { - let tx_hash = get_tx_code_hash(); +pub fn is_tx_whitelisted(ctx: &Ctx) -> VpResult { + let tx_hash = ctx.get_tx_code_hash()?; let key = parameters::storage::get_tx_whitelist_storage_key(); - let whitelist: Vec = read_pre(&key.to_string()).unwrap_or_default(); + let whitelist: Vec = ctx.read_pre(&key)?.unwrap_or_default(); // if whitelist is empty, allow any transaction - whitelist.is_empty() || whitelist.contains(&tx_hash.to_string()) + Ok(whitelist.is_empty() || whitelist.contains(&tx_hash.to_string())) } -pub fn is_vp_whitelisted(vp_bytes: &[u8]) -> bool { +pub fn is_vp_whitelisted(ctx: &Ctx, vp_bytes: &[u8]) -> VpResult { let vp_hash = sha256(vp_bytes); let key = parameters::storage::get_vp_whitelist_storage_key(); - let whitelist: Vec = read_pre(&key.to_string()).unwrap_or_default(); + let whitelist: Vec = ctx.read_pre(&key)?.unwrap_or_default(); // if whitelist is empty, allow any transaction - whitelist.is_empty() || whitelist.contains(&vp_hash.to_string()) + Ok(whitelist.is_empty() || whitelist.contains(&vp_hash.to_string())) +} + +/// Log a string. The message will be printed at the `tracing::Level::Info`. +pub fn log_string>(msg: T) { + let msg = msg.as_ref(); + unsafe { + anoma_vp_log_string(msg.as_ptr() as _, msg.len() as _); + } +} + +/// Checks if a proposal id is being executed +pub fn is_proposal_accepted(ctx: &Ctx, proposal_id: u64) -> VpResult { + let proposal_execution_key = + gov_storage::get_proposal_execution_key(proposal_id); + + ctx.has_key_pre(&proposal_execution_key) } -/// Log a string in a debug build. The message will be printed at the -/// `tracing::Level::Info`. Any `debug_log!` statements are only enabled in -/// non optimized builds by default. 
An optimized build will not execute -/// `debug_log!` statements unless `-C debug-assertions` is passed to the -/// compiler. +/// Checks whether a transaction is valid, which happens in two cases: +/// - tx is whitelisted, or +/// - tx is executed by an approved governance proposal (no need to be +/// whitelisted) +pub fn is_valid_tx(ctx: &Ctx, tx_data: &[u8]) -> VpResult { + if is_tx_whitelisted(ctx)? { + accept() + } else { + let proposal_id = u64::try_from_slice(tx_data).ok(); + + proposal_id.map_or(reject(), |id| is_proposal_accepted(ctx, id)) + } +} + +/// Format and log a string in a debug build. +/// +/// In a WASM target debug build, the message will be printed at the +/// `tracing::Level::Info` when executed in the VM. An optimized build will +/// omit any `debug_log!` statements unless `-C debug-assertions` is passed to +/// the compiler. +/// +/// In a non-WASM target, the message is simply printed to stdout. #[macro_export] macro_rules! debug_log { ($($arg:tt)*) => {{ - (if cfg!(debug_assertions) { log_string(format!($($arg)*)) }) - }} + ( + if cfg!(target_arch = "wasm32") { + if cfg!(debug_assertions) + { + log_string(format!($($arg)*)); + } + } else { + println!($($arg)*); + } + ) + }}; +} + +#[derive(Debug)] +pub struct Ctx(()); + +impl Ctx { + /// Create a host context. The context on the WASM side is only provided by + /// the VM once it's being executed (in here it's implicit). But + /// because we want to have an interface identical to the native + /// VPs, in which the context is explicit, in here we're just + /// using an empty `Ctx` to "fake" it. + /// + /// # Safety + /// + /// When using the `#[validity_predicate]` macro from `namada_macros`, + /// the constructor should not be called from transaction and validity + /// predicate implementations directly - they receive `&Self` as + /// an argument provided by the macro that wraps the low-level WASM + /// interface with Rust native types. + /// + /// Otherwise, this should only be called once to initialize this "fake" + /// context in order to benefit from type-safety of the host environment + /// methods implemented on the context. + #[allow(clippy::new_without_default)] + pub const unsafe fn new() -> Self { + Self(()) + } + + /// Read access to the prior storage (state before tx execution) + /// via [`trait@StorageRead`]. + pub fn pre(&self) -> CtxPreStorageRead<'_> { + CtxPreStorageRead { _ctx: self } + } + + /// Read access to the posterior storage (state after tx execution) + /// via [`trait@StorageRead`]. + pub fn post(&self) -> CtxPostStorageRead<'_> { + CtxPostStorageRead { _ctx: self } + } +} + +/// Read access to the prior storage (state before tx execution) via +/// [`trait@StorageRead`]. +#[derive(Debug)] +pub struct CtxPreStorageRead<'a> { + _ctx: &'a Ctx, +} + +/// Read access to the posterior storage (state after tx execution) via +/// [`trait@StorageRead`].
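// Illustrative sketch, not part of this patch: a WASM VP gets read access to both
// states of a changed key through the `Ctx` methods above. Assuming a Borsh-encoded
// `token::Amount` stored under `key`, a check that the value did not change could
// look like the following (the helper name `balance_unchanged` is hypothetical):
//
//     fn balance_unchanged(ctx: &Ctx, key: &storage::Key) -> VpResult {
//         let before: token::Amount = ctx.read_pre(key)?.unwrap_or_default();
//         let after: token::Amount = ctx.read_post(key)?.unwrap_or_default();
//         Ok(before == after)
//     }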
+#[derive(Debug)] +pub struct CtxPostStorageRead<'a> { + _ctx: &'a Ctx, +} + +/// Result of `VpEnv` or `storage_api::StorageRead` method call +pub type EnvResult = Result; + +/// Validity predicate result +pub type VpResult = EnvResult; + +/// Accept a transaction +pub fn accept() -> VpResult { + Ok(true) +} + +/// Reject a transaction +pub fn reject() -> VpResult { + Ok(false) +} + +#[derive(Debug)] +pub struct KeyValIterator(pub u64, pub PhantomData); + +impl<'view> VpEnv<'view> for Ctx { + type Post = CtxPostStorageRead<'view>; + type Pre = CtxPreStorageRead<'view>; + type PrefixIter = KeyValIterator<(String, Vec)>; + + fn pre(&'view self) -> Self::Pre { + CtxPreStorageRead { _ctx: self } + } + + fn post(&'view self) -> Self::Post { + CtxPostStorageRead { _ctx: self } + } + + fn read_temp( + &self, + key: &storage::Key, + ) -> Result, Error> { + let key = key.to_string(); + let read_result = + unsafe { anoma_vp_read_temp(key.as_ptr() as _, key.len() as _) }; + Ok(read_from_buffer(read_result, anoma_vp_result_buffer) + .and_then(|t| T::try_from_slice(&t[..]).ok())) + } + + fn read_bytes_temp( + &self, + key: &storage::Key, + ) -> Result>, Error> { + let key = key.to_string(); + let read_result = + unsafe { anoma_vp_read_temp(key.as_ptr() as _, key.len() as _) }; + Ok(read_from_buffer(read_result, anoma_vp_result_buffer)) + } + + fn get_chain_id(&'view self) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + get_chain_id() + } + + fn get_block_height(&'view self) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + get_block_height() + } + + fn get_block_hash(&'view self) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + get_block_hash() + } + + fn get_block_epoch(&'view self) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + get_block_epoch() + } + + fn iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + iter_prefix_impl(prefix) + } + + fn rev_iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + // Both `CtxPreStorageRead` and `CtxPostStorageRead` have the same impl + rev_iter_prefix_impl(prefix) + } + + fn eval( + &self, + vp_code: Vec, + input_data: Vec, + ) -> Result { + let result = unsafe { + anoma_vp_eval( + vp_code.as_ptr() as _, + vp_code.len() as _, + input_data.as_ptr() as _, + input_data.len() as _, + ) + }; + Ok(HostEnvResult::is_success(result)) + } + + fn verify_tx_signature( + &self, + pk: &common::PublicKey, + sig: &common::Signature, + ) -> Result { + let pk = BorshSerialize::try_to_vec(pk).unwrap(); + let sig = BorshSerialize::try_to_vec(sig).unwrap(); + let valid = unsafe { + anoma_vp_verify_tx_signature( + pk.as_ptr() as _, + pk.len() as _, + sig.as_ptr() as _, + sig.len() as _, + ) + }; + Ok(HostEnvResult::is_success(valid)) + } + + fn get_tx_code_hash(&self) -> Result { + let result = Vec::with_capacity(HASH_LENGTH); + unsafe { + anoma_vp_get_tx_code_hash(result.as_ptr() as _); + } + let slice = + unsafe { slice::from_raw_parts(result.as_ptr(), HASH_LENGTH) }; + Ok(Hash::try_from(slice).expect("Cannot convert the hash")) + } +} + +impl StorageRead<'_> for CtxPreStorageRead<'_> { + type PrefixIter = KeyValIterator<(String, Vec)>; + + fn read_bytes(&self, key: &storage::Key) -> Result>, Error> { + let key = key.to_string(); + let read_result = + unsafe { anoma_vp_read_pre(key.as_ptr() as _, key.len() as _) }; + 
Ok(read_from_buffer(read_result, anoma_vp_result_buffer)) + } + + fn has_key(&self, key: &storage::Key) -> Result { + let key = key.to_string(); + let found = + unsafe { anoma_vp_has_key_pre(key.as_ptr() as _, key.len() as _) }; + Ok(HostEnvResult::is_success(found)) + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>, Error> { + let read_result = unsafe { anoma_vp_iter_pre_next(iter.0) }; + Ok(read_key_val_bytes_from_buffer( + read_result, + anoma_vp_result_buffer, + )) + } + + // ---- Methods below share the same implementation in `pre/post` ---- + + fn iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + iter_prefix_impl(prefix) + } + + fn rev_iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + rev_iter_prefix_impl(prefix) + } + + fn get_chain_id(&self) -> Result { + get_chain_id() + } + + fn get_block_height(&self) -> Result { + get_block_height() + } + + fn get_block_hash(&self) -> Result { + get_block_hash() + } + + fn get_block_epoch(&self) -> Result { + get_block_epoch() + } +} + +impl StorageRead<'_> for CtxPostStorageRead<'_> { + type PrefixIter = KeyValIterator<(String, Vec)>; + + fn read_bytes(&self, key: &storage::Key) -> Result>, Error> { + let key = key.to_string(); + let read_result = + unsafe { anoma_vp_read_post(key.as_ptr() as _, key.len() as _) }; + Ok(read_from_buffer(read_result, anoma_vp_result_buffer)) + } + + fn has_key(&self, key: &storage::Key) -> Result { + let key = key.to_string(); + let found = + unsafe { anoma_vp_has_key_post(key.as_ptr() as _, key.len() as _) }; + Ok(HostEnvResult::is_success(found)) + } + + fn iter_next( + &self, + iter: &mut Self::PrefixIter, + ) -> Result)>, Error> { + let read_result = unsafe { anoma_vp_iter_post_next(iter.0) }; + Ok(read_key_val_bytes_from_buffer( + read_result, + anoma_vp_result_buffer, + )) + } + + // ---- Methods below share the same implementation in `pre/post` ---- + + fn iter_prefix( + &self, + prefix: &storage::Key, + ) -> Result { + iter_prefix_impl(prefix) + } + + fn rev_iter_prefix( + &self, + prefix: &storage::Key, + ) -> storage_api::Result { + rev_iter_prefix_impl(prefix) + } + + fn get_chain_id(&self) -> Result { + get_chain_id() + } + + fn get_block_height(&self) -> Result { + get_block_height() + } + + fn get_block_hash(&self) -> Result { + get_block_hash() + } + + fn get_block_epoch(&self) -> Result { + get_block_epoch() + } +} + +fn iter_prefix_impl( + prefix: &storage::Key, +) -> Result)>, Error> { + let prefix = prefix.to_string(); + let iter_id = unsafe { + anoma_vp_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) + }; + Ok(KeyValIterator(iter_id, PhantomData)) +} + +fn rev_iter_prefix_impl( + prefix: &storage::Key, +) -> Result)>, Error> { + let prefix = prefix.to_string(); + let iter_id = unsafe { + anoma_vp_rev_iter_prefix(prefix.as_ptr() as _, prefix.len() as _) + }; + Ok(KeyValIterator(iter_id, PhantomData)) +} + +fn get_chain_id() -> Result { + let result = Vec::with_capacity(CHAIN_ID_LENGTH); + unsafe { + anoma_vp_get_chain_id(result.as_ptr() as _); + } + let slice = + unsafe { slice::from_raw_parts(result.as_ptr(), CHAIN_ID_LENGTH) }; + Ok( + String::from_utf8(slice.to_vec()) + .expect("Cannot convert the ID string"), + ) +} + +fn get_block_height() -> Result { + Ok(BlockHeight(unsafe { anoma_vp_get_block_height() })) +} + +fn get_block_hash() -> Result { + let result = Vec::with_capacity(BLOCK_HASH_LENGTH); + unsafe { + anoma_vp_get_block_hash(result.as_ptr() as _); + } + let slice = + unsafe { slice::from_raw_parts(result.as_ptr(), 
BLOCK_HASH_LENGTH) }; + Ok(BlockHash::try_from(slice).expect("Cannot convert the hash")) +} + +fn get_block_epoch() -> Result<Epoch, Error> { + Ok(Epoch(unsafe { anoma_vp_get_block_epoch() })) } diff --git a/vp_prelude/src/nft.rs b/vp_prelude/src/nft.rs new file mode 100644 index 0000000000..1d5d019169 --- /dev/null +++ b/vp_prelude/src/nft.rs @@ -0,0 +1,116 @@ +//! NFT validity predicate + +use std::collections::BTreeSet; + +use namada::ledger::native_vp::VpEnv; +use namada::types::address::Address; +pub use namada::types::nft::*; +use namada::types::storage::Key; + +use super::{accept, reject, Ctx, EnvResult, VpResult}; + +enum KeyType { + Metadata(Address, String), + Approval(Address, String), + CurrentOwner(Address, String), + Creator(Address), + PastOwners(Address, String), + Unknown, +} + +pub fn vp( + ctx: &Ctx, + _tx_da_ta: Vec<u8>, + nft_address: &Address, + keys_changed: &BTreeSet<Key>, + verifiers: &BTreeSet<Address>
, +) -> VpResult { + for key in keys_changed { + match get_key_type(key, nft_address) { + KeyType::Creator(_creator_addr) => { + super::log_string("creator cannot be changed."); + return reject(); + } + KeyType::Approval(nft_address, token_id) => { + super::log_string(format!( + "nft vp, checking approvals with token id: {}", + token_id + )); + + if !(is_creator(ctx, &nft_address, verifiers)? + || is_approved( + ctx, + &nft_address, + token_id.as_ref(), + verifiers, + )?) + { + return reject(); + } + } + KeyType::Metadata(nft_address, token_id) => { + super::log_string(format!( + "nft vp, checking if metadata changed: {}", + token_id + )); + if !is_creator(ctx, &nft_address, verifiers)? { + return reject(); + } + } + _ => { + if !is_creator(ctx, nft_address, verifiers)? { + return reject(); + } + } + } + } + accept() +} + +fn is_approved( + ctx: &Ctx, + nft_address: &Address, + nft_token_id: &str, + verifiers: &BTreeSet
<Address>, +) -> EnvResult<bool> { + let approvals_key = get_token_approval_key(nft_address, nft_token_id); + let approval_addresses: Vec<Address> = + ctx.read_pre(&approvals_key)?.unwrap_or_default(); + return Ok(approval_addresses + .iter() + .any(|addr| verifiers.contains(addr))); +} + +fn is_creator( + ctx: &Ctx, + nft_address: &Address, + verifiers: &BTreeSet<Address>
, +) -> EnvResult { + let creator_key = get_creator_key(nft_address); + let creator_address: Address = ctx.read_pre(&creator_key)?.unwrap(); + Ok(verifiers.contains(&creator_address)) +} + +fn get_key_type(key: &Key, nft_address: &Address) -> KeyType { + let is_creator_key = is_nft_creator_key(key, nft_address); + let is_metadata_key = is_nft_metadata_key(key, nft_address); + let is_approval_key = is_nft_approval_key(key, nft_address); + let is_current_owner_key = is_nft_current_owner_key(key, nft_address); + let is_past_owner_key = is_nft_past_owners_key(key, nft_address); + if let Some(nft_address) = is_creator_key { + return KeyType::Creator(nft_address); + } + if let Some((nft_address, token_id)) = is_metadata_key { + return KeyType::Metadata(nft_address, token_id); + } + if let Some((nft_address, token_id)) = is_approval_key { + return KeyType::Approval(nft_address, token_id); + } + if let Some((nft_address, token_id)) = is_current_owner_key { + return KeyType::CurrentOwner(nft_address, token_id); + } + if let Some((nft_address, token_id)) = is_past_owner_key { + return KeyType::PastOwners(nft_address, token_id); + } + KeyType::Unknown +} diff --git a/vp_prelude/src/token.rs b/vp_prelude/src/token.rs new file mode 100644 index 0000000000..0dcb0b10e9 --- /dev/null +++ b/vp_prelude/src/token.rs @@ -0,0 +1,66 @@ +//! A fungible token validity predicate. + +use std::collections::BTreeSet; + +use namada::types::address::{Address, InternalAddress}; +use namada::types::storage::Key; +/// Vp imports and functions. +use namada::types::storage::KeySeg; +use namada::types::token; +pub use namada::types::token::*; + +use super::*; + +/// A token validity predicate. +pub fn vp( + ctx: &Ctx, + token: &Address, + keys_changed: &BTreeSet, + verifiers: &BTreeSet
, +) -> VpResult { + let mut change: Change = 0; + for key in keys_changed.iter() { + let owner: Option<&Address> = + match token::is_multitoken_balance_key(token, key) { + Some((_, o)) => Some(o), + None => token::is_balance_key(token, key), + }; + match owner { + None => { + // Unknown changes to this address space are disallowed, but + // unknown changes anywhere else are permitted + if key.segments.get(0) == Some(&token.to_db_key()) { + return reject(); + } + } + Some(owner) => { + // accumulate the change + let pre: Amount = match owner { + Address::Internal(InternalAddress::IbcMint) => { + Amount::max() + } + Address::Internal(InternalAddress::IbcBurn) => { + Amount::default() + } + _ => ctx.read_pre(key)?.unwrap_or_default(), + }; + let post: Amount = match owner { + Address::Internal(InternalAddress::IbcMint) => { + ctx.read_temp(key)?.unwrap_or_else(Amount::max) + } + Address::Internal(InternalAddress::IbcBurn) => { + ctx.read_temp(key)?.unwrap_or_default() + } + _ => ctx.read_post(key)?.unwrap_or_default(), + }; + let this_change = post.change() - pre.change(); + change += this_change; + // make sure that the spender approved the transaction + if this_change < 0 && !verifiers.contains(owner) { + return reject(); + } + } + } + } + Ok(change == 0) +} diff --git a/wasm/wasm_source/Cargo.lock b/wasm/Cargo.lock similarity index 89% rename from wasm/wasm_source/Cargo.lock rename to wasm/Cargo.lock index 4905239cb0..e8a72f19d2 100644 --- a/wasm/wasm_source/Cargo.lock +++ b/wasm/Cargo.lock @@ -48,9 +48,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9a8f622bcf6ff3df478e9deba3e03e4e04b300f8e6a139e192c05fa3490afc7" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "ark-bls12-381" @@ -163,9 +163,9 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "async-trait" -version = "0.1.57" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" +checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" dependencies = [ "proc-macro2", "quote", @@ -249,7 +249,7 @@ dependencies = [ "cc", "cfg-if 1.0.0", "constant_time_eq", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -320,15 +320,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.11.0" +version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byte-slice-cast" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c5fdd0166095e1d463fc6cc01aa8ce547ad77a4e84d42eb6762b084e28067e" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytecheck" @@ -398,6 +398,16 @@ name = "clru" version = "0.5.0" source = "git+https://github.com/marmeladema/clru-rs.git?rev=71ca566#71ca566915f21f3c308091ca7756a91b0f8b5afc" +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = 
"concat-idents" version = "1.1.3" @@ -521,26 +531,24 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", "memoffset", - "once_cell", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if 1.0.0", - "once_cell", ] [[package]] @@ -580,16 +588,60 @@ checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle-ng", "zeroize", ] +[[package]] +name = "cxx" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "darling" -version = "0.13.4" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +checksum = "4529658bdda7fd6769b8614be250cdcfc3aeb0ee72fe66f9e41e5e5eb73eac02" dependencies = [ "darling_core", "darling_macro", @@ -597,9 +649,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.13.4" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +checksum = "649c91bc01e8b1eac09fb91e8dbc7d517684ca6be8ebc75bb9cafc894f9fdb6f" dependencies = [ "fnv", "ident_case", @@ -610,9 +662,9 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.13.4" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +checksum = "ddfc69c5bfcbd2fc09a0f38451d2daf0e372e367986a83906d1b0dbc88134fb5" dependencies = [ "darling_core", "quote", @@ -658,9 +710,9 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" 
dependencies = [ "block-buffer 0.10.3", "crypto-common", @@ -710,7 +762,7 @@ checksum = "758e2a0cd8a6cdf483e1d369e7d081647e00b88d8953e34d8f2cbba05ae28368" dependencies = [ "curve25519-dalek-ng", "hex", - "rand_core 0.6.3", + "rand_core 0.6.4", "serde", "sha2 0.9.9", "thiserror", @@ -757,18 +809,18 @@ dependencies = [ [[package]] name = "enumset" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4799cdb24d48f1f8a7a98d06b7fde65a85a2d1e42b25a889f5406aa1fbefe074" +checksum = "19be8061a06ab6f3a6cf21106c873578bf01bd42ad15e0311a9c76161cb1c753" dependencies = [ "enumset_derive", ] [[package]] name = "enumset_derive" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea83a3fbdc1d999ccfbcbee717eab36f8edf2d71693a23ce0d7cca19e085304c" +checksum = "03e7b551eba279bf0fa88b83a46330168c1560a52a94f5126f892f0b364ab3e0" dependencies = [ "darling", "proc-macro2", @@ -788,7 +840,7 @@ dependencies = [ "regex", "serde", "serde_json", - "sha3 0.10.4", + "sha3 0.10.6", "thiserror", "uint", ] @@ -1061,18 +1113,28 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "iana-time-zone" -version = "0.1.47" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c495f162af0bf17656d0014a0eded5f3cd2f365fdd204548c2869db89359dc7" +checksum = "f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" dependencies = [ "android_system_properties", "core-foundation-sys", + "iana-time-zone-haiku", "js-sys", - "once_cell", "wasm-bindgen", "winapi", ] +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + [[package]] name = "ibc" version = "0.14.0" @@ -1090,7 +1152,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "sha2 0.10.5", + "sha2 0.10.6", "subtle-encoding", "tendermint", "tendermint-light-client-verifier", @@ -1201,24 +1263,24 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" [[package]] name = "js-sys" -version = "0.3.59" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258451ab10b34f8af53416d1fdab72c22e805f0c92a1136d59470ec0b11138b2" +checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" dependencies = [ "wasm-bindgen", ] @@ -1243,9 +1305,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.132" +version = "0.2.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5" +checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" [[package]] name = "libloading" @@ -1299,6 
+1361,15 @@ dependencies = [ "libsecp256k1-core", ] +[[package]] +name = "link-cplusplus" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" +dependencies = [ + "cc", +] + [[package]] name = "log" version = "0.4.17" @@ -1400,7 +1471,7 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "namada" -version = "0.7.1" +version = "0.8.1" dependencies = [ "ark-bls12-381", "ark-serialize", @@ -1429,7 +1500,7 @@ dependencies = [ "prost-types", "pwasm-utils", "rand", - "rand_core 0.6.3", + "rand_core 0.6.4", "rust_decimal", "serde", "serde_json", @@ -1454,7 +1525,7 @@ dependencies = [ [[package]] name = "namada_macros" -version = "0.7.1" +version = "0.8.1" dependencies = [ "quote", "syn", @@ -1462,22 +1533,24 @@ dependencies = [ [[package]] name = "namada_proof_of_stake" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", + "derivative", "proptest", "thiserror", ] [[package]] name = "namada_tests" -version = "0.7.1" +version = "0.8.1" dependencies = [ "chrono", "concat-idents", "derivative", "namada", - "namada_vm_env", + "namada_tx_prelude", + "namada_vp_prelude", "prost", "serde_json", "sha2 0.9.9", @@ -1489,33 +1562,39 @@ dependencies = [ [[package]] name = "namada_tx_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.5", + "sha2 0.10.6", + "thiserror", ] [[package]] name = "namada_vm_env" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", - "hex", "namada", - "namada_macros", ] [[package]] name = "namada_vp_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.5", + "sha2 0.10.6", + "thiserror", ] [[package]] name = "namada_wasm" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", "getrandom", @@ -1626,9 +1705,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "opaque-debug" @@ -1638,9 +1717,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "parity-scale-codec" -version = "3.1.5" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" +checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" dependencies = [ "arrayvec", "bitvec", @@ -1676,9 +1755,9 @@ checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "pest" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0560d531d1febc25a3c9398a62a71256c0178f2e3443baedd9ad4bb8c9deb4" +checksum = "dbc7bc69c062e492337d74d59b120c274fd3d261b6bf6d3207d499b4b379c41a" dependencies = [ "thiserror", "ucd-trie", @@ -1771,9 +1850,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.43" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" 
dependencies = [ "unicode-ident", ] @@ -1916,7 +1995,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -1926,7 +2005,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -1937,9 +2016,9 @@ checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] @@ -1950,7 +2029,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -2091,9 +2170,9 @@ dependencies = [ [[package]] name = "rlp" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ "bytes", "rustc-hex", @@ -2214,6 +2293,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scratch" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" + [[package]] name = "seahash" version = "4.1.0" @@ -2240,9 +2325,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] @@ -2258,9 +2343,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -2269,9 +2354,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44" +checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" dependencies = [ "itoa", "ryu", @@ -2304,13 +2389,13 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9db03534dff993187064c4e0c05a5708d2a9728ace9a8959b77bedf415dac5" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -2327,11 +2412,11 @@ dependencies = [ [[package]] name 
= "sha3" -version = "0.10.4" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaedf34ed289ea47c2b741bb72e5357a209512d67bcd4bda44359e5bf0470f56" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "keccak", ] @@ -2346,9 +2431,9 @@ dependencies = [ [[package]] name = "signature" -version = "1.4.0" +version = "1.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "simple-error" @@ -2358,9 +2443,9 @@ checksum = "cc47a29ce97772ca5c927f75bac34866b16d64e07f330c3248e2d7226623901b" [[package]] name = "smallvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "sp-std" @@ -2371,7 +2456,7 @@ checksum = "35391ea974fa5ee869cb094d5b437688fbf3d8127d64d1b9fed5822a1ed39b12" [[package]] name = "sparse-merkle-tree" version = "0.3.1-pre" -source = "git+https://github.com/heliaxdev/sparse-merkle-tree?branch=bat/arse-merkle-tree#04ad1eeb28901b57a7599bbe433b3822965dabe8" +source = "git+https://github.com/heliaxdev/sparse-merkle-tree?rev=04ad1eeb28901b57a7599bbe433b3822965dabe8#04ad1eeb28901b57a7599bbe433b3822965dabe8" dependencies = [ "borsh", "cfg-if 1.0.0", @@ -2414,9 +2499,9 @@ checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" [[package]] name = "syn" -version = "1.0.99" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", @@ -2533,6 +2618,15 @@ dependencies = [ "time", ] +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + [[package]] name = "test-log" version = "0.2.11" @@ -2546,18 +2640,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.34" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c1b05ca9d106ba7d2e31a9dab4a64e7be2cce415321966ea3132c49a656e252" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.34" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f2591983642de85c921015f3f070c665a197ed69e417af436115e3a1407487" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", @@ -2575,9 +2669,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3f9a28b618c3a6b9251b6908e9c99e04b9e5c02e6581ccbb67d59c34ef7f9b" +checksum = "d634a985c4d4238ec39cacaed2e7ae552fbd3c476b552c1deac3021b7d7eaf0c" dependencies = [ "libc", "num_threads", @@ -2622,9 +2716,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.36" +version 
= "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if 1.0.0", "log", @@ -2635,9 +2729,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", @@ -2646,18 +2740,18 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.29" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ "once_cell", ] [[package]] name = "tracing-subscriber" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" dependencies = [ "matchers", "once_cell", @@ -2668,6 +2762,17 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tx_template" +version = "0.8.1" +dependencies = [ + "borsh", + "getrandom", + "namada_tests", + "namada_tx_prelude", + "wee_alloc", +] + [[package]] name = "typenum" version = "1.15.0" @@ -2682,9 +2787,9 @@ checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" [[package]] name = "uint" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" +checksum = "a45526d29728d135c2900b0d30573fe3ee79fceb12ef534c7bb30e810a91b601" dependencies = [ "byteorder", "crunchy", @@ -2694,27 +2799,27 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.3" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf" +checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "version_check" @@ -2722,6 +2827,17 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "vp_template" 
+version = "0.8.1" +dependencies = [ + "borsh", + "getrandom", + "namada_tests", + "namada_vp_prelude", + "wee_alloc", +] + [[package]] name = "wait-timeout" version = "0.2.0" @@ -2739,9 +2855,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7652e3f6c4706c8d9cd54832c4a4ccb9b5336e2c3bd154d5cccfbf1c1f5f7d" +checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -2749,9 +2865,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "662cd44805586bd52971b9586b1df85cdbbd9112e4ef4d8f41559c334dc6ac3f" +checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" dependencies = [ "bumpalo", "log", @@ -2764,9 +2880,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b260f13d3012071dfb1512849c033b1925038373aea48ced3012c09df952c602" +checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2774,9 +2890,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be8e654bdd9b79216c2929ab90721aa82faf65c48cdf08bdc4e7f51357b80da" +checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", @@ -2787,15 +2903,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.82" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a" +checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" [[package]] name = "wasm-encoder" -version = "0.16.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d443c5a7daae71697d97ec12ad70b4fe8766d3a0f4db16158ac8b781365892f7" +checksum = "c64ac98d5d61192cc45c701b7e4bd0b9aff91e2edfc7a088406cfe2288581e2c" dependencies = [ "leb128", ] @@ -3038,9 +3154,9 @@ checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" [[package]] name = "wast" -version = "46.0.0" +version = "47.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0ab19660e3ea6891bba69167b9be40fad00fb1fe3dd39c5eebcee15607131b" +checksum = "02b98502f3978adea49551e801a6687678e6015317d7d9470a67fe813393f2a8" dependencies = [ "leb128", "memchr", @@ -3050,9 +3166,9 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.48" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f775282def4d5bffd94d60d6ecd57bfe6faa46171cdbf8d32bd5458842b1e3e" +checksum = "7aab4e20c60429fbba9670a6cae0fff9520046ba0aa3e6d0b1cd2653bea14898" dependencies = [ "wast", ] @@ -3096,6 +3212,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml new file mode 100644 index 0000000000..d6f164a445 --- /dev/null +++ b/wasm/Cargo.toml @@ -0,0 +1,32 @@ +[workspace] +resolver = "2" + +members = [ + "wasm_source", + "tx_template", + "vp_template", +] + +[patch.crates-io] +# TODO temp patch for , and more tba. +borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} +borsh-derive = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} +borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} +borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} +# patched to a commit on the `eth-bridge-integration` branch of our fork +tendermint = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} +tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} +tendermint-testgen = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} +tendermint-light-client-verifier = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} + +# patched to a commit on the `eth-bridge-integration` branch of our fork +ibc = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} +ibc-proto = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} + +[profile.release] +# smaller and faster wasm (https://rustwasm.github.io/book/reference/code-size.html#compiling-with-link-time-optimizations-lto) +lto = true +# simply terminate on panics, no unwinding +panic = "abort" +# tell llvm to optimize for size (https://rustwasm.github.io/book/reference/code-size.html#tell-llvm-to-optimize-for-size-instead-of-speed) +opt-level = 'z' \ No newline at end of file diff --git a/wasm/checksums.json b/wasm/checksums.json index a112ed3a63..4001a69fa0 100644 --- a/wasm/checksums.json +++ b/wasm/checksums.json @@ -1,20 +1,19 @@ { - "tx_bond.wasm": "tx_bond.5536bed92d94b1aaaa82fd3531b7d4fedb00ae1f39fede7ef1634e070e5f4455.wasm", + "tx_bond.wasm": "tx_bond.ad18675419b9dd88d4eaf170366754dd12fddc45a67e2cd100b1554ea693de43.wasm", "tx_bridge_pool.wasm": "tx_bridge_pool.e21563260c03cfdab1f195878f49bf93722027ad26fcd097cfebbc5c4d279082.wasm", - "tx_from_intent.wasm": "tx_from_intent.7243e31594d0ca5e656228bed5c84359a3d1dbfd24f1656eb9b52b0266ffaa47.wasm", - "tx_ibc.wasm": "tx_ibc.1c09d3f083a91f28708b7f09698c311e609b712da683dcbcd40e7267a1cf2adc.wasm", - "tx_init_account.wasm": "tx_init_account.95b3a1e4867160eb91f9a7812f53adf33bb44f04b9eb3f310854b7a1d7a26e77.wasm", - "tx_init_nft.wasm": "tx_init_nft.022446ca174c6ccb1e7818224ebc30515123e0bc148ec8dd59ce5b47f7447bd7.wasm", - "tx_init_proposal.wasm": "tx_init_proposal.8f93bb139e932ba24871efe22cb62177c1c7133f8957965c041accad3d04516e.wasm", - "tx_init_validator.wasm": "tx_init_validator.916a8c6a6008d5f35341e58ccafd8169b7596464f1f3253eea96c277f167fe1b.wasm", - "tx_mint_nft.wasm": "tx_mint_nft.9e12869d07ac36a9fdddf69710f2f6905c55bb4dc49c885e87d47b3d6c389ec2.wasm", - "tx_transfer.wasm": 
"tx_transfer.48c8bfdd119c0753a03c65764b0e099f6647dd158490fb5e04eab171c27b422e.wasm", - "tx_unbond.wasm": "tx_unbond.b2851a1eefd2e781411e495ef52ce6148893c02e06d0160298871fd231e299b4.wasm", - "tx_update_vp.wasm": "tx_update_vp.5b9786f039324205c464fec6c795d84ea3ec9b24d3c6c63e7462444811237855.wasm", - "tx_vote_proposal.wasm": "tx_vote_proposal.31c96b1b171d0020c0788170f9677606589324024a90e3d532da2b9a2cedf57e.wasm", - "tx_withdraw.wasm": "tx_withdraw.5e360d269c8d3ae574ae9cbf301e1d9eed8c3ad6c103deb5e1c46ddb35af5708.wasm", - "vp_nft.wasm": "vp_nft.29b7fb0b2c247bb5389166e82707abd256177f276bbaf40ccd201ecad8039bad.wasm", - "vp_testnet_faucet.wasm": "vp_testnet_faucet.e1b7805802df2871a93f584e4df97a4584cda9d58435b14c73791fe18d93e4f4.wasm", - "vp_token.wasm": "vp_token.8332429c84d70ab6614b20a41d208ac61126141d302319a99269f6fc9e08053a.wasm", - "vp_user.wasm": "vp_user.795a1dccb7b18f0abeb07161d07e31ce5d64221a52e85098627ab60115462ba4.wasm" + "tx_ibc.wasm": "tx_ibc.b5a3bf6ca1dea0767406d64251928816a7d95b974cff09f6e702a8f3fbd64b1f.wasm", + "tx_init_account.wasm": "tx_init_account.343e04328e157514ec85cfb650cad5cad659eac27e80b1a0dec61286352a3c9d.wasm", + "tx_init_nft.wasm": "tx_init_nft.36437ec1cf161d3e21f8a4f1e939676914dfdcb7ee667c92c2807767ddfabdb3.wasm", + "tx_init_proposal.wasm": "tx_init_proposal.c8e187e2bd7869253f9d9d7de5fa2cb5896e9ca114f124ad800131c9e82db1a7.wasm", + "tx_init_validator.wasm": "tx_init_validator.8c047862fb2e538dfe414880a9b847741b84a0b8fcb4beb67752f58b7d954b6f.wasm", + "tx_mint_nft.wasm": "tx_mint_nft.110baf82d92f78ca740750808237ecd4793e24b662876f363f51c5d1cd030694.wasm", + "tx_transfer.wasm": "tx_transfer.07335720d2ab07311219a81fd6baca5801792dc16b7c9bab490e2b756257a6dd.wasm", + "tx_unbond.wasm": "tx_unbond.895123c93e6e7c6d2303be44cc9332a7a9a867cf159ce87cf55abb155e8d83ca.wasm", + "tx_update_vp.wasm": "tx_update_vp.d2743de89548f3ae6decf2a32ab086e960be9b954bdd24bd6e8e731195449540.wasm", + "tx_vote_proposal.wasm": "tx_vote_proposal.348c3c28fc9e7356a7106f4b037a0bc7b6b207761b000ad0e0cb786678afbee5.wasm", + "tx_withdraw.wasm": "tx_withdraw.a2a0a3f9eb961cba5bb4d1677805bafdcc807637fbd203f8afaa2aa2adb6857e.wasm", + "vp_nft.wasm": "vp_nft.e88e46e49cbbc28dd1fc4e518195bffc4d1feb43b4976d02580865298fd29e75.wasm", + "vp_testnet_faucet.wasm": "vp_testnet_faucet.f6b3d44133b0c35cdbfbe19328d8cdfc62809bd30a0c64eef57f3877cc7e8c2f.wasm", + "vp_token.wasm": "vp_token.2100aaa1fed90d35e87aace6516794facdd7aab3062102c1a762bef604c98075.wasm", + "vp_user.wasm": "vp_user.14fdcbaa1bd3c28115a3eb1f53802b4042080bb255e276b15d2fb16338aacf31.wasm" } \ No newline at end of file diff --git a/wasm/release.toml b/wasm/release.toml new file mode 100644 index 0000000000..dbacf2cec3 --- /dev/null +++ b/wasm/release.toml @@ -0,0 +1,7 @@ +allow-branch = ["main", "maint-*"] +consolidate-commits = true +pre-release-commit-message = "fixup! Namada {{version}}" +publish = false +push = false +shared-version = true +tag = false diff --git a/wasm/tx_template/Cargo.lock b/wasm/tx_template/Cargo.lock deleted file mode 100644 index b59950f447..0000000000 --- a/wasm/tx_template/Cargo.lock +++ /dev/null @@ -1,3106 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "addr2line" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" -dependencies = [ - "gimli 0.26.1", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] - -[[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anyhow" -version = "1.0.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" - -[[package]] -name = "ark-bls12-381" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65be532f9dd1e98ad0150b037276cde464c6f371059e6dd02c0222395761f6aa" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea978406c4b1ca13c2db2373b05cc55429c3575b8b21f1b9ee859aa5b03dd42" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-serialize-derive", - "ark-std", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dd4e5f0bf8285d5ed538d27fab7411f3e297908fd93c62195de8bee3f199e82" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand", -] - -[[package]] -name = "arrayref" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" - -[[package]] -name = "arrayvec" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" - -[[package]] -name = "async-trait" -version = "0.1.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "backtrace" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f" -dependencies = [ - "addr2line", - "cc", - "cfg-if 1.0.0", - "libc", - "miniz_oxide", - "object 0.27.1", - "rustc-demangle", -] - -[[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bech32" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9ff0bbfd639f15c74af777d81383cf53efb7c93613f6cab67c6c11e05bbf8b" - -[[package]] -name = "bit-set" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "blake3" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882e99e4a0cb2ae6cb6e442102e8e6b7131718d94110e64c3e6a34ea9b106f37" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if 1.0.0", - "constant_time_eq", - "digest 0.10.3", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "block-padding", - "generic-array", -] - -[[package]] -name = "block-buffer" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d36a02058e76b040de25a4464ba1c80935655595b661505c8b39b664828b95" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - -[[package]] -name = "borsh" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "borsh-derive", - "hashbrown", -] - -[[package]] -name = "borsh-derive" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "bumpalo" -version = "3.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" - -[[package]] -name = "byte-slice-cast" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c5fdd0166095e1d463fc6cc01aa8ce547ad77a4e84d42eb6762b084e28067e" - -[[package]] -name = "bytecheck" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "314889ea31cda264cb7c3d6e6e5c9415a987ecb0e72c17c00d36fbb881d34abe" -dependencies = [ - "bytecheck_derive", - "ptr_meta", -] - -[[package]] -name = "bytecheck_derive" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a2b3b92c135dae665a6f760205b89187638e83bed17ef3e44e83c712cf30600" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cc" -version = "1.0.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" - -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" -dependencies = [ - "iana-time-zone", - "num-integer", - "num-traits", - "winapi", -] - -[[package]] -name = "clru" -version = "0.5.0" -source = "git+https://github.com/marmeladema/clru-rs.git?rev=71ca566#71ca566915f21f3c308091ca7756a91b0f8b5afc" - 
-[[package]] -name = "concat-idents" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6f90860248d75014b7b103db8fee4f291c07bfb41306cdf77a0a5ab7a10d2f" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - -[[package]] -name = "core-foundation-sys" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" - -[[package]] -name = "cpufeatures" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" -dependencies = [ - "libc", -] - -[[package]] -name = "cranelift-bforest" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6bea67967505247f54fa2c85cf4f6e0e31c4e5692c9b70e4ae58e339067333" -dependencies = [ - "cranelift-entity", -] - -[[package]] -name = "cranelift-codegen" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48194035d2752bdd5bdae429e3ab88676e95f52a2b1355a5d4e809f9e39b1d74" -dependencies = [ - "cranelift-bforest", - "cranelift-codegen-meta", - "cranelift-codegen-shared", - "cranelift-entity", - "gimli 0.25.0", - "log", - "regalloc", - "smallvec", - "target-lexicon", -] - -[[package]] -name = "cranelift-codegen-meta" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976efb22fcab4f2cd6bd4e9913764616a54d895c1a23530128d04e03633c555f" -dependencies = [ - "cranelift-codegen-shared", - "cranelift-entity", -] - -[[package]] -name = "cranelift-codegen-shared" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dabb5fe66e04d4652e434195b45ae65b5c8172d520247b8f66d8df42b2b45dc" - -[[package]] -name = "cranelift-entity" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3329733e4d4b8e91c809efcaa4faee80bf66f20164e3dd16d707346bd3494799" - -[[package]] -name = "cranelift-frontend" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279afcc0d3e651b773f94837c3d581177b348c8d69e928104b2e9fccb226f921" -dependencies = [ - "cranelift-codegen", - "log", - "smallvec", - "target-lexicon", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00d6d2ea26e8b151d99093005cb442fb9a37aeaca582a03ec70946f49ab5ed9" 
-dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils", - "lazy_static", - "memoffset", - "scopeguard", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" -dependencies = [ - "cfg-if 1.0.0", - "lazy_static", -] - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-common" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-ng" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.6.3", - "subtle-ng", - "zeroize", -] - -[[package]] -name = "darling" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn", -] - -[[package]] -name = "darling_macro" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" -dependencies = [ - "darling_core", - "quote", - "syn", -] - -[[package]] -name = "data-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" -dependencies = [ - "block-buffer 0.10.0", - "crypto-common", - "subtle", -] - -[[package]] -name = "dynasm" -version = "1.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b1801e630bd336d0bbbdbf814de6cc749c9a400c7e3d995e6adfd455d0c83c" -dependencies = [ - "bitflags", - "byteorder", - "lazy_static", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "dynasmrt" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d428afc93ad288f6dffc1fa5f4a78201ad2eec33c5a522e51c181009eb09061" -dependencies = [ - "byteorder", - "dynasm", - "memmap2", -] - -[[package]] -name = "ed25519" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eed12bbf7b5312f8da1c2722bc06d8c6b12c2d86a7fb35a194c7f3e6fc2bbe39" -dependencies = [ - "signature", -] - -[[package]] -name = "ed25519-consensus" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542217a53411d471743362251a5a1770a667cb0cc0384c9be2c0952bd70a7275" -dependencies = [ - "curve25519-dalek-ng", - "hex", - "rand_core 0.6.3", - "serde", - "sha2 0.9.9", - "thiserror", - "zeroize", -] - -[[package]] -name = "ed25519-dalek" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" -dependencies = [ - "curve25519-dalek", - "ed25519", - "sha2 0.9.9", - "zeroize", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "enum-iterator" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eeac5c5edb79e4e39fe8439ef35207780a11f69c52cbe424ce3dfad4cb78de6" -dependencies = [ - "enum-iterator-derive", -] - -[[package]] -name = "enum-iterator-derive" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "enumset" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6216d2c19a6fb5f29d1ada1dc7bc4367a8cbf0fa4af5cf12e07b5bbdde6b5b2c" -dependencies = [ - "enumset_derive", -] - -[[package]] -name = "enumset_derive" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6451128aa6655d880755345d085494cf7561a6bee7c8dc821e5d77e6d267ecd4" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ethabi" -version = "17.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4966fba78396ff92db3b817ee71143eccd98acf0f876b8d600e585a670c5d1b" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3 0.10.2", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11da94e443c60508eb62cf256243a64da87304c2802ac2528847f79d750007ef" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-rlp", - "impl-serde", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2827b94c556145446fcce834ca86b7abf0c39a805883fe20e72c5bfdb5a0dc6" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-rlp", - "impl-serde", - "primitive-types", - 
"uint", -] - -[[package]] -name = "eyre" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" -dependencies = [ - "indenter", - "once_cell", -] - -[[package]] -name = "fallible-iterator" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" - -[[package]] -name = "fastrand" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" -dependencies = [ - "instant", -] - -[[package]] -name = "ferveo-common" -version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" -dependencies = [ - "anyhow", - "ark-ec", - "ark-serialize", - "ark-std", - "serde", - "serde_bytes", -] - -[[package]] -name = "fixed-hash" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" -dependencies = [ - "byteorder", - "rand", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixedbitset" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" - -[[package]] -name = "flex-error" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c606d892c9de11507fa0dcffc116434f94e105d0bbdc4e405b61519464c49d7b" -dependencies = [ - "eyre", - "paste", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" - -[[package]] -name = "futures-io" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" - -[[package]] -name = "futures-sink" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" - -[[package]] -name = "futures-task" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" - -[[package]] -name = "futures-util" -version = "0.3.21" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" -dependencies = [ - "futures-core", - "futures-sink", - "futures-task", - "pin-project-lite", - "pin-utils", -] - -[[package]] -name = "generic-array" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi", -] - -[[package]] -name = "gimli" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7" -dependencies = [ - "fallible-iterator", - "indexmap", - "stable_deref_trait", -] - -[[package]] -name = "gimli" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" - -[[package]] -name = "gumdrop" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46571f5d540478cf70d2a42dd0d6d8e9f4b9cc7531544b93311e657b86568a0b" -dependencies = [ - "gumdrop_derive", -] - -[[package]] -name = "gumdrop_derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915ef07c710d84733522461de2a734d4d62a3fd39a4d4f404c2f385ef8618d05" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "iana-time-zone" -version = "0.1.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "js-sys", - "wasm-bindgen", - "winapi", -] - -[[package]] -name = "ibc" -version = "0.14.0" -source = "git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" -dependencies = [ - "bytes", - "derive_more", - "flex-error", - "ibc-proto", - "ics23", - "num-traits", - "prost", - "prost-types", - "safe-regex", - "serde", - "serde_derive", - "serde_json", - "sha2 0.10.2", - "subtle-encoding", - "tendermint", - "tendermint-light-client-verifier", - "tendermint-proto", - "tendermint-testgen", - "time", - "tracing", -] - -[[package]] -name = "ibc-proto" -version = 
"0.17.1" -source = "git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" -dependencies = [ - "base64", - "bytes", - "prost", - "prost-types", - "serde", - "tendermint-proto", -] - -[[package]] -name = "ics23" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d454cc0a22bd556cc3d3c69f9d75a392a36244634840697a4b9eb81bc5c8ae0" -dependencies = [ - "anyhow", - "bytes", - "hex", - "prost", - "ripemd160", - "sha2 0.9.9", - "sha3 0.9.1", - "sp-std", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" -dependencies = [ - "serde", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - -[[package]] -name = "indexmap" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" -dependencies = [ - "autocfg", - "hashbrown", - "serde", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "js-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "keccak" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7" - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "leb128" -version = "0.2.5" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" - -[[package]] -name = "libc" -version = "0.2.134" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" - -[[package]] -name = "libloading" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52" -dependencies = [ - "cfg-if 1.0.0", - "winapi", -] - -[[package]] -name = "libsecp256k1" -version = "0.7.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "arrayref", - "base64", - "digest 0.9.0", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand", - "serde", - "sha2 0.9.9", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "log" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "loupe" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6a72dfa44fe15b5e76b94307eeb2ff995a8c5b283b55008940c02e0c5b634d" -dependencies = [ - "indexmap", - "loupe-derive", - "rustversion", -] - -[[package]] -name = "loupe-derive" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fbfc88337168279f2e9ae06e157cfed4efd3316e14dc96ed074d4f2e6c5952" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "mach" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" -dependencies = [ - "libc", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata", -] - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "memmap2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memory_units" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" - -[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = "more-asserts" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" - -[[package]] -name = "multimap" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" - -[[package]] -name = "namada" -version = "0.7.1" -dependencies = [ - "ark-bls12-381", - "ark-serialize", - "bech32", - "borsh", - "chrono", - "clru", - "data-encoding", - "derivative", - "ed25519-consensus", - "ethabi", - "eyre", - "ferveo-common", - "hex", - "ibc", - "ibc-proto", - "ics23", - "itertools", - "libsecp256k1", - "loupe", - "namada_proof_of_stake", - "num-rational", - "parity-wasm", - "proptest", - "prost", - "prost-types", - "pwasm-utils", - "rand", - "rand_core 0.6.3", - "rust_decimal", - "serde", - "serde_json", - "sha2 0.9.9", - "sparse-merkle-tree", - "tempfile", - "tendermint", - "tendermint-proto", - "thiserror", - "tiny-keccak", - "tonic-build", - "tracing", - "wasmer", - "wasmer-cache", - "wasmer-compiler-singlepass", - "wasmer-engine-dylib", - "wasmer-engine-universal", - "wasmer-vm", - "wasmparser 0.83.0", - "zeroize", -] - -[[package]] -name = "namada_macros" -version = "0.7.1" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "namada_proof_of_stake" -version = "0.7.1" -dependencies = [ - "borsh", - "proptest", - "thiserror", -] - -[[package]] -name = "namada_tests" -version = "0.7.1" -dependencies = [ - "chrono", - "concat-idents", - "derivative", - "namada", - "namada_vm_env", - "prost", - "serde_json", - "sha2 0.9.9", - "tempfile", - "test-log", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "namada_tx_prelude" -version = "0.7.1" -dependencies = [ - "namada_vm_env", - "sha2 0.10.2", -] - -[[package]] -name = "namada_vm_env" -version = "0.7.1" -dependencies = [ - "borsh", - "hex", - "namada", - "namada_macros", -] - -[[package]] -name = "num-bigint" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-derive" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "num-integer" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" -dependencies = [ - 
"autocfg", - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_threads" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ba99ba6393e2c3734791401b66902d981cb03bf190af674ca69949b6d5fb15" -dependencies = [ - "libc", -] - -[[package]] -name = "object" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" -dependencies = [ - "memchr", -] - -[[package]] -name = "object" -version = "0.28.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456" -dependencies = [ - "crc32fast", - "hashbrown", - "indexmap", - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" - -[[package]] -name = "opaque-debug" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" - -[[package]] -name = "parity-scale-codec" -version = "3.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9299338969a3d2f491d65f140b00ddec470858402f888af98e8642fb5e8965cd" -dependencies = [ - "proc-macro-crate 1.2.1", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "parity-wasm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" - -[[package]] -name = "paste" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5" - -[[package]] -name = "pest" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" -dependencies = [ - "ucd-trie", -] - -[[package]] -name = "petgraph" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f" -dependencies = [ - "fixedbitset", - "indexmap", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "ppv-lite86" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" - -[[package]] -name = "primitive-types" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28720988bff275df1f51b171e1b2a18c30d194c4d2b61defdacecd625a5d94a" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-crate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" -dependencies = [ - "once_cell", - "thiserror", - "toml", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "proptest" -version = "1.0.0" -source = "git+https://github.com/heliaxdev/proptest?branch=tomas/sm#b9517a726c032897a8b41c215147f44588b33dcc" -dependencies = [ - "bit-set", - "bitflags", - "byteorder", - "lazy_static", - "num-traits", - "quick-error 2.0.1", - "rand", - "rand_chacha", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", -] - -[[package]] -name = "prost" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-build" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" -dependencies = [ - "bytes", - "heck", - "itertools", - "lazy_static", - "log", - "multimap", - "petgraph", - "prost", - "prost-types", - "regex", - "tempfile", - "which", -] - -[[package]] -name = "prost-derive" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" -dependencies = [ - "anyhow", - "itertools", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "prost-types" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" -dependencies = [ - "bytes", - "prost", -] - -[[package]] -name = "ptr_meta" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" -dependencies = [ - "ptr_meta_derive", -] - -[[package]] -name = "ptr_meta_derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "pwasm-utils" -version = "0.18.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "880b3384fb00b8f6ecccd5d358b93bd2201900ae3daad213791d1864f6441f5c" -dependencies = [ - "byteorder", - "log", - "parity-wasm", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - -[[package]] -name = "quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core 0.6.3", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.3", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rand_xorshift" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" -dependencies = [ - "rand_core 0.6.3", -] - -[[package]] -name = "rayon" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" -dependencies = [ - "autocfg", - "crossbeam-deque", - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-utils", - "lazy_static", - "num_cpus", -] - -[[package]] -name = "redox_syscall" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c" -dependencies = [ - "bitflags", -] - -[[package]] -name = "regalloc" 
-version = "0.0.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "571f7f397d61c4755285cd37853fe8e03271c243424a907415909379659381c5" -dependencies = [ - "log", - "rustc-hash", - "smallvec", -] - -[[package]] -name = "regex" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "region" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76e189c2369884dce920945e2ddf79b3dff49e071a167dd1817fa9c4c00d512e" -dependencies = [ - "bitflags", - "libc", - "mach", - "winapi", -] - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - -[[package]] -name = "rend" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79af64b4b6362ffba04eef3a4e10829718a4896dac19daa741851c86781edf95" -dependencies = [ - "bytecheck", -] - -[[package]] -name = "ripemd160" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "rkyv" -version = "0.7.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a37de5dfc60bae2d94961dacd03c7b80e426b66a99fa1b17799570dbdd8f96" -dependencies = [ - "bytecheck", - "hashbrown", - "ptr_meta", - "rend", - "rkyv_derive", - "seahash", -] - -[[package]] -name = "rkyv_derive" -version = "0.7.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719d447dd0e84b23cee6cb5b32d97e21efb112a3e3c636c8da36647b938475a1" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "rlp" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rust_decimal" -version = "1.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d37baa70cf8662d2ba1c1868c5983dda16ef32b105cce41fb5c47e72936a90b3" -dependencies = [ - "arrayvec", - "num-traits", - "serde", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" - -[[package]] -name = "rusty-fork" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" -dependencies = [ - "fnv", - "quick-error 1.2.3", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "safe-proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "814c536dcd27acf03296c618dab7ad62d28e70abd7ba41d3f34a2ce707a2c666" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "safe-quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e530f7831f3feafcd5f1aae406ac205dd998436b4007c8e80f03eca78a88f7" -dependencies = [ - "safe-proc-macro2", -] - -[[package]] -name = "safe-regex" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15289bf322e0673d52756a18194167f2378ec1a15fe884af6e2d2cb934822b0" -dependencies = [ - "safe-regex-macro", -] - -[[package]] -name = "safe-regex-compiler" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fba76fae590a2aa665279deb1f57b5098cbace01a0c5e60e262fcf55f7c51542" -dependencies = [ - "safe-proc-macro2", - "safe-quote", -] - -[[package]] -name = "safe-regex-macro" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c2e96b5c03f158d1b16ba79af515137795f4ad4e8de3f790518aae91f1d127" -dependencies = [ - "safe-proc-macro2", - "safe-regex-compiler", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "seahash" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver-parser" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_bytes" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_repr" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "sha2" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.10.3", -] - -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug", -] - -[[package]] -name = "sha3" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a31480366ec990f395a61b7c08122d99bd40544fdb5abcfc1b06bb29994312c" -dependencies = [ - "digest 0.10.3", - "keccak", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signature" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" - -[[package]] -name = "simple-error" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc47a29ce97772ca5c927f75bac34866b16d64e07f330c3248e2d7226623901b" - -[[package]] -name = "smallvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" - -[[package]] -name = "sp-std" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35391ea974fa5ee869cb094d5b437688fbf3d8127d64d1b9fed5822a1ed39b12" - -[[package]] -name = "sparse-merkle-tree" -version = "0.3.1-pre" -source = "git+https://github.com/heliaxdev/sparse-merkle-tree?branch=bat/arse-merkle-tree#04ad1eeb28901b57a7599bbe433b3822965dabe8" -dependencies = [ - "borsh", - "cfg-if 1.0.0", - "ics23", - "sha2 0.9.9", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "static_assertions" 
-version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "subtle-encoding" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dcb1ed7b8330c5eed5441052651dd7a12c75e2ed88f2ec024ae1fa3a5e59945" -dependencies = [ - "zeroize", -] - -[[package]] -name = "subtle-ng" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" - -[[package]] -name = "syn" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "unicode-xid", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "target-lexicon" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9bffcddbc2458fa3e6058414599e3c838a022abae82e5c67b4f7f80298d5bff" - -[[package]] -name = "tempfile" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" -dependencies = [ - "cfg-if 1.0.0", - "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", -] - -[[package]] -name = "tendermint" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "async-trait", - "bytes", - "ed25519", - "ed25519-dalek", - "flex-error", - "futures", - "num-traits", - "once_cell", - "prost", - "prost-types", - "serde", - "serde_bytes", - "serde_json", - "serde_repr", - "sha2 0.9.9", - "signature", - "subtle", - "subtle-encoding", - "tendermint-proto", - "time", - "zeroize", -] - -[[package]] -name = "tendermint-light-client-verifier" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "derive_more", - "flex-error", - "serde", - "tendermint", - "time", -] - -[[package]] -name = "tendermint-proto" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "bytes", - "flex-error", - "num-derive", - "num-traits", - "prost", - "prost-types", - "serde", - "serde_bytes", - "subtle-encoding", - "time", -] - -[[package]] -name = "tendermint-testgen" -version = "0.23.6" -source 
= "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "ed25519-dalek", - "gumdrop", - "serde", - "serde_json", - "simple-error", - "tempfile", - "tendermint", - "time", -] - -[[package]] -name = "test-log" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb78caec569a40f42c078c798c0e35b922d9054ec28e166f0d6ac447563d91a4" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thiserror" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thread_local" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" -dependencies = [ - "once_cell", -] - -[[package]] -name = "time" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "004cbc98f30fa233c61a38bc77e96a9106e65c88f2d3bef182ae952027e5753d" -dependencies = [ - "libc", - "num_threads", - "time-macros", -] - -[[package]] -name = "time-macros" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25eb0ca3468fc0acc11828786797f6ef9aa1555e4a211a60d64cc8e4d1be47d6" - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "toml" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" -dependencies = [ - "serde", -] - -[[package]] -name = "tonic-build" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" -dependencies = [ - "proc-macro2", - "prost-build", - "quote", - "syn", -] - -[[package]] -name = "tracing" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" -dependencies = [ - "cfg-if 1.0.0", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tracing-core" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" -dependencies = [ - "lazy_static", - "valuable", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" -dependencies = [ - "lazy_static", - 
"matchers", - "regex", - "sharded-slab", - "thread_local", - "tracing", - "tracing-core", -] - -[[package]] -name = "tx_template" -version = "0.7.1" -dependencies = [ - "borsh", - "getrandom", - "namada_tests", - "namada_tx_prelude", - "wee_alloc", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "ucd-trie" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" - -[[package]] -name = "uint" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - -[[package]] -name = "unicode-width" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wait-timeout" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" -dependencies = [ - "libc", -] - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - -[[package]] -name = "wasm-bindgen" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" -dependencies = [ - "cfg-if 1.0.0", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" -dependencies = [ - "bumpalo", - "lazy_static", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" -dependencies = [ - "proc-macro2", - "quote", - "syn", - 
"wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" - -[[package]] -name = "wasmer" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc7dff846db3f38f8ed0be4a009fdfeb729cf1f94a2c7fb6ff2fec01cefa110" -dependencies = [ - "cfg-if 1.0.0", - "indexmap", - "js-sys", - "loupe", - "more-asserts", - "target-lexicon", - "thiserror", - "wasm-bindgen", - "wasmer-compiler", - "wasmer-compiler-cranelift", - "wasmer-derive", - "wasmer-engine", - "wasmer-engine-dylib", - "wasmer-engine-universal", - "wasmer-types", - "wasmer-vm", - "wat", - "winapi", -] - -[[package]] -name = "wasmer-cache" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "834a0de78bf30b9bce61c4c236344b9d8f2f4a3b7713f8de8a8274fbc2d4e9d5" -dependencies = [ - "blake3", - "hex", - "thiserror", - "wasmer", -] - -[[package]] -name = "wasmer-compiler" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c91abf22b16dad3826ec0d0e3ec0a8304262a6c7a14e16528c536131b80e63d" -dependencies = [ - "enumset", - "loupe", - "rkyv", - "serde", - "serde_bytes", - "smallvec", - "target-lexicon", - "thiserror", - "wasmer-types", - "wasmer-vm", - "wasmparser 0.78.2", -] - -[[package]] -name = "wasmer-compiler-cranelift" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7624a1f496b163139a7e0b442426cad805bec70486900287506f9d15a29323ab" -dependencies = [ - "cranelift-codegen", - "cranelift-entity", - "cranelift-frontend", - "gimli 0.25.0", - "loupe", - "more-asserts", - "rayon", - "smallvec", - "target-lexicon", - "tracing", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] -name = "wasmer-compiler-singlepass" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b63c1538ffb4b0e09edaebfcac35c34141d5944c52f77d137cbe0b634bd40fa" -dependencies = [ - "byteorder", - "dynasm", - "dynasmrt", - "lazy_static", - "loupe", - "more-asserts", - "rayon", - "smallvec", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] -name = "wasmer-derive" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "933b23b5cee0f58aa6c17c6de7e1f3007279357e0d555f22e24d6b395cfe7f89" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "wasmer-engine" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41db0ac4df90610cda8320cfd5abf90c6ec90e298b6fe5a09a81dff718b55640" -dependencies = [ - "backtrace", - "enumset", - "lazy_static", - "loupe", - "memmap2", - "more-asserts", - "rustc-demangle", - "serde", - "serde_bytes", - "target-lexicon", - "thiserror", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] -name = "wasmer-engine-dylib" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "591683f3356ac31cc88aaecaf77ac2cc9f456014348b01af46c164f44f531162" -dependencies = [ - "cfg-if 1.0.0", - "enum-iterator", - "enumset", - "leb128", - "libloading", - "loupe", - "object 0.28.3", - "rkyv", - "serde", - "tempfile", - "tracing", - "wasmer-compiler", - "wasmer-engine", - "wasmer-object", - "wasmer-types", - "wasmer-vm", - "which", -] 
- -[[package]] -name = "wasmer-engine-universal" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dccfde103e9b87427099a6de344b7c791574f307d035c8c7dbbc00974c1af0c1" -dependencies = [ - "cfg-if 1.0.0", - "enum-iterator", - "enumset", - "leb128", - "loupe", - "region", - "rkyv", - "wasmer-compiler", - "wasmer-engine", - "wasmer-types", - "wasmer-vm", - "winapi", -] - -[[package]] -name = "wasmer-object" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d0c4005592998bd840f2289102ef9c67b6138338ed78e1fc0809586aa229040" -dependencies = [ - "object 0.28.3", - "thiserror", - "wasmer-compiler", - "wasmer-types", -] - -[[package]] -name = "wasmer-types" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4deb854f178265a76b59823c41547d259c65da3687b606b0b9c12d80ab950e3e" -dependencies = [ - "indexmap", - "loupe", - "rkyv", - "serde", - "thiserror", -] - -[[package]] -name = "wasmer-vm" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dbc5c989cb14a102433927e630473da52f83d82c469acd5cfa8fc7efacc1e70" -dependencies = [ - "backtrace", - "cc", - "cfg-if 1.0.0", - "enum-iterator", - "indexmap", - "libc", - "loupe", - "memoffset", - "more-asserts", - "region", - "rkyv", - "serde", - "thiserror", - "wasmer-types", - "winapi", -] - -[[package]] -name = "wasmparser" -version = "0.78.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52144d4c78e5cf8b055ceab8e5fa22814ce4315d6002ad32cfd914f37c12fd65" - -[[package]] -name = "wasmparser" -version = "0.83.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" - -[[package]] -name = "wast" -version = "39.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9bbbd53432b267421186feee3e52436531fa69a7cfee9403f5204352df3dd05" -dependencies = [ - "leb128", - "memchr", - "unicode-width", -] - -[[package]] -name = "wat" -version = "1.0.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab98ed25494f97c69f28758617f27c3e92e5336040b5c3a14634f2dd3fe61830" -dependencies = [ - "wast", -] - -[[package]] -name = "wee_alloc" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb3b5a6b2bb17cb6ad44a2e68a43e8d2722c997da10e928665c72ec6c0a0b8e" -dependencies = [ - "cfg-if 0.1.10", - "libc", - "memory_units", - "winapi", -] - -[[package]] -name = "which" -version = "4.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" -dependencies = [ - "either", - "lazy_static", - "libc", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "wyz" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b31594f29d27036c383b53b59ed3476874d518f0efb151b27a4c275141390e" -dependencies = [ - "tap", -] - -[[package]] -name = "zeroize" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] diff --git a/wasm/tx_template/Cargo.toml b/wasm/tx_template/Cargo.toml index 0c4a29c8b1..a86dcbb07a 100644 --- a/wasm/tx_template/Cargo.toml +++ b/wasm/tx_template/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "tx_template" resolver = "2" -version = "0.7.1" +version = "0.8.1" [lib] crate-type = ["cdylib"] @@ -17,28 +17,3 @@ getrandom = { version = "0.2", features = ["custom"] } [dev-dependencies] namada_tests = {path = "../../tests"} - -[patch.crates-io] -# TODO temp patch for , and more tba. -borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -tendermint = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-testgen = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-light-client-verifier = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -ibc = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} -ibc-proto = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} - -[profile.release] -# smaller and faster wasm (https://rustwasm.github.io/book/reference/code-size.html#compiling-with-link-time-optimizations-lto) -lto = true -# simply terminate on panics, no unwinding -panic = "abort" -# tell llvm to optimize for size (https://rustwasm.github.io/book/reference/code-size.html#tell-llvm-to-optimize-for-size-instead-of-speed) -opt-level = 'z' diff --git a/wasm/tx_template/src/lib.rs b/wasm/tx_template/src/lib.rs index f507e90bed..473984aa31 100644 --- a/wasm/tx_template/src/lib.rs +++ b/wasm/tx_template/src/lib.rs @@ -1,8 +1,9 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { +fn apply_tx(_ctx: &mut Ctx, tx_data: Vec) -> TxResult { log_string(format!("apply_tx called with data: {:#?}", tx_data)); + Ok(()) } #[cfg(test)] @@ -19,7 +20,7 @@ mod tests { tx_host_env::init(); 
let tx_data = vec![]; - apply_tx(tx_data); + apply_tx(ctx(), tx_data).unwrap(); let env = tx_host_env::take(); assert!(env.all_touched_storage_keys().is_empty()); diff --git a/wasm/vp_template/Cargo.lock b/wasm/vp_template/Cargo.lock deleted file mode 100644 index 392c010281..0000000000 --- a/wasm/vp_template/Cargo.lock +++ /dev/null @@ -1,3106 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "addr2line" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" -dependencies = [ - "gimli 0.26.1", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] - -[[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anyhow" -version = "1.0.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" - -[[package]] -name = "ark-bls12-381" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65be532f9dd1e98ad0150b037276cde464c6f371059e6dd02c0222395761f6aa" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea978406c4b1ca13c2db2373b05cc55429c3575b8b21f1b9ee859aa5b03dd42" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-serialize-derive", - 
"ark-std", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dd4e5f0bf8285d5ed538d27fab7411f3e297908fd93c62195de8bee3f199e82" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand", -] - -[[package]] -name = "arrayref" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" - -[[package]] -name = "arrayvec" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" - -[[package]] -name = "async-trait" -version = "0.1.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "backtrace" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f" -dependencies = [ - "addr2line", - "cc", - "cfg-if 1.0.0", - "libc", - "miniz_oxide", - "object 0.27.1", - "rustc-demangle", -] - -[[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bech32" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9ff0bbfd639f15c74af777d81383cf53efb7c93613f6cab67c6c11e05bbf8b" - -[[package]] -name = "bit-set" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "blake3" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882e99e4a0cb2ae6cb6e442102e8e6b7131718d94110e64c3e6a34ea9b106f37" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if 1.0.0", - "constant_time_eq", - "digest 0.10.3", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "block-padding", - 
"generic-array", -] - -[[package]] -name = "block-buffer" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d36a02058e76b040de25a4464ba1c80935655595b661505c8b39b664828b95" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - -[[package]] -name = "borsh" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "borsh-derive", - "hashbrown", -] - -[[package]] -name = "borsh-derive" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.9.4" -source = "git+https://github.com/heliaxdev/borsh-rs.git?rev=cd5223e5103c4f139e0c54cf8259b7ec5ec4073a#cd5223e5103c4f139e0c54cf8259b7ec5ec4073a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "bumpalo" -version = "3.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" - -[[package]] -name = "byte-slice-cast" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c5fdd0166095e1d463fc6cc01aa8ce547ad77a4e84d42eb6762b084e28067e" - -[[package]] -name = "bytecheck" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "314889ea31cda264cb7c3d6e6e5c9415a987ecb0e72c17c00d36fbb881d34abe" -dependencies = [ - "bytecheck_derive", - "ptr_meta", -] - -[[package]] -name = "bytecheck_derive" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a2b3b92c135dae665a6f760205b89187638e83bed17ef3e44e83c712cf30600" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cc" -version = "1.0.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" - -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.22" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" -dependencies = [ - "iana-time-zone", - "num-integer", - "num-traits", - "winapi", -] - -[[package]] -name = "clru" -version = "0.5.0" -source = "git+https://github.com/marmeladema/clru-rs.git?rev=71ca566#71ca566915f21f3c308091ca7756a91b0f8b5afc" - -[[package]] -name = "concat-idents" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6f90860248d75014b7b103db8fee4f291c07bfb41306cdf77a0a5ab7a10d2f" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - -[[package]] -name = "core-foundation-sys" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" - -[[package]] -name = "cpufeatures" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" -dependencies = [ - "libc", -] - -[[package]] -name = "cranelift-bforest" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6bea67967505247f54fa2c85cf4f6e0e31c4e5692c9b70e4ae58e339067333" -dependencies = [ - "cranelift-entity", -] - -[[package]] -name = "cranelift-codegen" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48194035d2752bdd5bdae429e3ab88676e95f52a2b1355a5d4e809f9e39b1d74" -dependencies = [ - "cranelift-bforest", - "cranelift-codegen-meta", - "cranelift-codegen-shared", - "cranelift-entity", - "gimli 0.25.0", - "log", - "regalloc", - "smallvec", - "target-lexicon", -] - -[[package]] -name = "cranelift-codegen-meta" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976efb22fcab4f2cd6bd4e9913764616a54d895c1a23530128d04e03633c555f" -dependencies = [ - "cranelift-codegen-shared", - "cranelift-entity", -] - -[[package]] -name = "cranelift-codegen-shared" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dabb5fe66e04d4652e434195b45ae65b5c8172d520247b8f66d8df42b2b45dc" - -[[package]] -name = "cranelift-entity" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3329733e4d4b8e91c809efcaa4faee80bf66f20164e3dd16d707346bd3494799" - -[[package]] -name = "cranelift-frontend" -version = "0.76.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279afcc0d3e651b773f94837c3d581177b348c8d69e928104b2e9fccb226f921" -dependencies = [ - "cranelift-codegen", - "log", - "smallvec", - "target-lexicon", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00d6d2ea26e8b151d99093005cb442fb9a37aeaca582a03ec70946f49ab5ed9" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils", - "lazy_static", - "memoffset", - "scopeguard", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" -dependencies = [ - "cfg-if 1.0.0", - "lazy_static", -] - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-common" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-ng" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.6.3", - "subtle-ng", - "zeroize", -] - -[[package]] -name = "darling" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn", -] - -[[package]] -name = "darling_macro" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" -dependencies = [ - "darling_core", - "quote", - "syn", -] - -[[package]] -name = "data-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" -dependencies = [ - "block-buffer 0.10.0", - "crypto-common", - "subtle", -] - -[[package]] -name = "dynasm" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b1801e630bd336d0bbbdbf814de6cc749c9a400c7e3d995e6adfd455d0c83c" -dependencies = [ - "bitflags", - "byteorder", - "lazy_static", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "dynasmrt" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d428afc93ad288f6dffc1fa5f4a78201ad2eec33c5a522e51c181009eb09061" -dependencies = [ - "byteorder", - "dynasm", - "memmap2", -] - -[[package]] -name = "ed25519" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eed12bbf7b5312f8da1c2722bc06d8c6b12c2d86a7fb35a194c7f3e6fc2bbe39" -dependencies = [ - "signature", -] - -[[package]] -name = "ed25519-consensus" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542217a53411d471743362251a5a1770a667cb0cc0384c9be2c0952bd70a7275" -dependencies = [ - "curve25519-dalek-ng", - "hex", - "rand_core 0.6.3", - "serde", - "sha2 0.9.9", - "thiserror", - "zeroize", -] - -[[package]] -name = "ed25519-dalek" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" -dependencies = [ - "curve25519-dalek", - "ed25519", - "sha2 0.9.9", - "zeroize", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "enum-iterator" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eeac5c5edb79e4e39fe8439ef35207780a11f69c52cbe424ce3dfad4cb78de6" -dependencies = [ - "enum-iterator-derive", -] - -[[package]] -name = "enum-iterator-derive" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "enumset" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6216d2c19a6fb5f29d1ada1dc7bc4367a8cbf0fa4af5cf12e07b5bbdde6b5b2c" -dependencies = [ - "enumset_derive", -] - -[[package]] -name = "enumset_derive" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6451128aa6655d880755345d085494cf7561a6bee7c8dc821e5d77e6d267ecd4" -dependencies = [ - "darling", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ethabi" -version = "17.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4966fba78396ff92db3b817ee71143eccd98acf0f876b8d600e585a670c5d1b" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3 0.10.2", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"11da94e443c60508eb62cf256243a64da87304c2802ac2528847f79d750007ef" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-rlp", - "impl-serde", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2827b94c556145446fcce834ca86b7abf0c39a805883fe20e72c5bfdb5a0dc6" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-rlp", - "impl-serde", - "primitive-types", - "uint", -] - -[[package]] -name = "eyre" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" -dependencies = [ - "indenter", - "once_cell", -] - -[[package]] -name = "fallible-iterator" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" - -[[package]] -name = "fastrand" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" -dependencies = [ - "instant", -] - -[[package]] -name = "ferveo-common" -version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" -dependencies = [ - "anyhow", - "ark-ec", - "ark-serialize", - "ark-std", - "serde", - "serde_bytes", -] - -[[package]] -name = "fixed-hash" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" -dependencies = [ - "byteorder", - "rand", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixedbitset" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" - -[[package]] -name = "flex-error" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c606d892c9de11507fa0dcffc116434f94e105d0bbdc4e405b61519464c49d7b" -dependencies = [ - "eyre", - "paste", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" - -[[package]] -name = "futures-io" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" - -[[package]] -name = 
"futures-sink" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" - -[[package]] -name = "futures-task" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" - -[[package]] -name = "futures-util" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" -dependencies = [ - "futures-core", - "futures-sink", - "futures-task", - "pin-project-lite", - "pin-utils", -] - -[[package]] -name = "generic-array" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi", -] - -[[package]] -name = "gimli" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7" -dependencies = [ - "fallible-iterator", - "indexmap", - "stable_deref_trait", -] - -[[package]] -name = "gimli" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" - -[[package]] -name = "gumdrop" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46571f5d540478cf70d2a42dd0d6d8e9f4b9cc7531544b93311e657b86568a0b" -dependencies = [ - "gumdrop_derive", -] - -[[package]] -name = "gumdrop_derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915ef07c710d84733522461de2a734d4d62a3fd39a4d4f404c2f385ef8618d05" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "iana-time-zone" -version = "0.1.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "js-sys", - "wasm-bindgen", - "winapi", -] - -[[package]] -name = "ibc" -version = "0.14.0" -source = 
"git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" -dependencies = [ - "bytes", - "derive_more", - "flex-error", - "ibc-proto", - "ics23", - "num-traits", - "prost", - "prost-types", - "safe-regex", - "serde", - "serde_derive", - "serde_json", - "sha2 0.10.2", - "subtle-encoding", - "tendermint", - "tendermint-light-client-verifier", - "tendermint-proto", - "tendermint-testgen", - "time", - "tracing", -] - -[[package]] -name = "ibc-proto" -version = "0.17.1" -source = "git+https://github.com/heliaxdev/ibc-rs.git?rev=f4703dfe2c1f25cc431279ab74f10f3e0f6827e2#f4703dfe2c1f25cc431279ab74f10f3e0f6827e2" -dependencies = [ - "base64", - "bytes", - "prost", - "prost-types", - "serde", - "tendermint-proto", -] - -[[package]] -name = "ics23" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d454cc0a22bd556cc3d3c69f9d75a392a36244634840697a4b9eb81bc5c8ae0" -dependencies = [ - "anyhow", - "bytes", - "hex", - "prost", - "ripemd160", - "sha2 0.9.9", - "sha3 0.9.1", - "sp-std", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" -dependencies = [ - "serde", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - -[[package]] -name = "indexmap" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" -dependencies = [ - "autocfg", - "hashbrown", - "serde", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "js-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "keccak" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7" - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "leb128" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" - -[[package]] -name = "libc" -version = "0.2.134" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" - -[[package]] -name = "libloading" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52" -dependencies = [ - "cfg-if 1.0.0", - "winapi", -] - -[[package]] -name = "libsecp256k1" -version = "0.7.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "arrayref", - "base64", - "digest 0.9.0", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand", - "serde", - "sha2 0.9.9", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.3.0" -source = "git+https://github.com/heliaxdev/libsecp256k1?rev=bbb3bd44a49db361f21d9db80f9a087c194c0ae9#bbb3bd44a49db361f21d9db80f9a087c194c0ae9" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "log" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "loupe" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6a72dfa44fe15b5e76b94307eeb2ff995a8c5b283b55008940c02e0c5b634d" -dependencies = [ - "indexmap", - "loupe-derive", - "rustversion", -] - -[[package]] -name = "loupe-derive" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fbfc88337168279f2e9ae06e157cfed4efd3316e14dc96ed074d4f2e6c5952" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "mach" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" -dependencies = [ - "libc", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata", -] - -[[package]] -name = "memchr" 
-version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "memmap2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memory_units" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" - -[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = "more-asserts" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" - -[[package]] -name = "multimap" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" - -[[package]] -name = "namada" -version = "0.7.1" -dependencies = [ - "ark-bls12-381", - "ark-serialize", - "bech32", - "borsh", - "chrono", - "clru", - "data-encoding", - "derivative", - "ed25519-consensus", - "ethabi", - "eyre", - "ferveo-common", - "hex", - "ibc", - "ibc-proto", - "ics23", - "itertools", - "libsecp256k1", - "loupe", - "namada_proof_of_stake", - "num-rational", - "parity-wasm", - "proptest", - "prost", - "prost-types", - "pwasm-utils", - "rand", - "rand_core 0.6.3", - "rust_decimal", - "serde", - "serde_json", - "sha2 0.9.9", - "sparse-merkle-tree", - "tempfile", - "tendermint", - "tendermint-proto", - "thiserror", - "tiny-keccak", - "tonic-build", - "tracing", - "wasmer", - "wasmer-cache", - "wasmer-compiler-singlepass", - "wasmer-engine-dylib", - "wasmer-engine-universal", - "wasmer-vm", - "wasmparser 0.83.0", - "zeroize", -] - -[[package]] -name = "namada_macros" -version = "0.7.1" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "namada_proof_of_stake" -version = "0.7.1" -dependencies = [ - "borsh", - "proptest", - "thiserror", -] - -[[package]] -name = "namada_tests" -version = "0.7.1" -dependencies = [ - "chrono", - "concat-idents", - "derivative", - "namada", - "namada_vm_env", - "prost", - "serde_json", - "sha2 0.9.9", - "tempfile", - "test-log", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "namada_vm_env" -version = "0.7.1" -dependencies = [ - "borsh", - "hex", - "namada", - "namada_macros", -] - -[[package]] -name = "namada_vp_prelude" -version = "0.7.1" -dependencies = [ - "namada_vm_env", - "sha2 0.10.2", -] - -[[package]] -name = "num-bigint" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-derive" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" -dependencies = 
[ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "num-integer" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" -dependencies = [ - "autocfg", - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_threads" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ba99ba6393e2c3734791401b66902d981cb03bf190af674ca69949b6d5fb15" -dependencies = [ - "libc", -] - -[[package]] -name = "object" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" -dependencies = [ - "memchr", -] - -[[package]] -name = "object" -version = "0.28.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456" -dependencies = [ - "crc32fast", - "hashbrown", - "indexmap", - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" - -[[package]] -name = "opaque-debug" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" - -[[package]] -name = "parity-scale-codec" -version = "3.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9299338969a3d2f491d65f140b00ddec470858402f888af98e8642fb5e8965cd" -dependencies = [ - "proc-macro-crate 1.2.1", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "parity-wasm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" - -[[package]] -name = "paste" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5" - -[[package]] -name = "pest" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" -dependencies = [ - "ucd-trie", -] - -[[package]] -name = "petgraph" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f" -dependencies = [ - "fixedbitset", - "indexmap", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "ppv-lite86" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" - -[[package]] -name = "primitive-types" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28720988bff275df1f51b171e1b2a18c30d194c4d2b61defdacecd625a5d94a" -dependencies = [ - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-crate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" -dependencies = [ - "once_cell", - "thiserror", - "toml", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "proptest" -version = "1.0.0" -source = "git+https://github.com/heliaxdev/proptest?branch=tomas/sm#b9517a726c032897a8b41c215147f44588b33dcc" -dependencies = [ - "bit-set", - "bitflags", - "byteorder", - "lazy_static", - "num-traits", - "quick-error 2.0.1", - "rand", - "rand_chacha", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", -] - -[[package]] -name = "prost" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-build" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" -dependencies = [ - "bytes", - "heck", - "itertools", - "lazy_static", - "log", - "multimap", - "petgraph", - "prost", - "prost-types", - "regex", - "tempfile", - "which", -] - -[[package]] -name = "prost-derive" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" -dependencies = [ - "anyhow", - "itertools", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "prost-types" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" -dependencies = [ - "bytes", - "prost", -] - -[[package]] -name = "ptr_meta" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" -dependencies = [ - "ptr_meta_derive", -] - -[[package]] -name = "ptr_meta_derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "pwasm-utils" -version = "0.18.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "880b3384fb00b8f6ecccd5d358b93bd2201900ae3daad213791d1864f6441f5c" -dependencies = [ - "byteorder", - "log", - "parity-wasm", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - -[[package]] -name = "quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core 0.6.3", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.3", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rand_xorshift" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" -dependencies = [ - "rand_core 0.6.3", -] - -[[package]] -name = "rayon" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" -dependencies = [ - "autocfg", - "crossbeam-deque", - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.9.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-utils", - "lazy_static", - "num_cpus", -] - -[[package]] -name = "redox_syscall" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c" -dependencies = [ - "bitflags", -] - -[[package]] -name = "regalloc" -version = "0.0.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "571f7f397d61c4755285cd37853fe8e03271c243424a907415909379659381c5" -dependencies = [ - "log", - "rustc-hash", - "smallvec", -] - -[[package]] -name = "regex" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "region" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76e189c2369884dce920945e2ddf79b3dff49e071a167dd1817fa9c4c00d512e" -dependencies = [ - "bitflags", - "libc", - "mach", - "winapi", -] - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - -[[package]] -name = "rend" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79af64b4b6362ffba04eef3a4e10829718a4896dac19daa741851c86781edf95" -dependencies = [ - "bytecheck", -] - -[[package]] -name = "ripemd160" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "rkyv" -version = "0.7.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a37de5dfc60bae2d94961dacd03c7b80e426b66a99fa1b17799570dbdd8f96" -dependencies = [ - "bytecheck", - "hashbrown", - "ptr_meta", - "rend", - "rkyv_derive", - "seahash", -] - -[[package]] -name = "rkyv_derive" -version = "0.7.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719d447dd0e84b23cee6cb5b32d97e21efb112a3e3c636c8da36647b938475a1" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "rlp" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rust_decimal" -version = "1.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d37baa70cf8662d2ba1c1868c5983dda16ef32b105cce41fb5c47e72936a90b3" -dependencies = [ - "arrayvec", - "num-traits", - "serde", -] - -[[package]] 
-name = "rustc-demangle" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" - -[[package]] -name = "rusty-fork" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" -dependencies = [ - "fnv", - "quick-error 1.2.3", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "safe-proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "814c536dcd27acf03296c618dab7ad62d28e70abd7ba41d3f34a2ce707a2c666" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "safe-quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e530f7831f3feafcd5f1aae406ac205dd998436b4007c8e80f03eca78a88f7" -dependencies = [ - "safe-proc-macro2", -] - -[[package]] -name = "safe-regex" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15289bf322e0673d52756a18194167f2378ec1a15fe884af6e2d2cb934822b0" -dependencies = [ - "safe-regex-macro", -] - -[[package]] -name = "safe-regex-compiler" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fba76fae590a2aa665279deb1f57b5098cbace01a0c5e60e262fcf55f7c51542" -dependencies = [ - "safe-proc-macro2", - "safe-quote", -] - -[[package]] -name = "safe-regex-macro" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c2e96b5c03f158d1b16ba79af515137795f4ad4e8de3f790518aae91f1d127" -dependencies = [ - "safe-proc-macro2", - "safe-regex-compiler", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "seahash" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver-parser" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_bytes" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_repr" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "sha2" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.10.3", -] - -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug", -] - -[[package]] -name = "sha3" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a31480366ec990f395a61b7c08122d99bd40544fdb5abcfc1b06bb29994312c" -dependencies = [ - "digest 0.10.3", - "keccak", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signature" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" - -[[package]] -name = "simple-error" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc47a29ce97772ca5c927f75bac34866b16d64e07f330c3248e2d7226623901b" - -[[package]] -name = "smallvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" - -[[package]] -name = "sp-std" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35391ea974fa5ee869cb094d5b437688fbf3d8127d64d1b9fed5822a1ed39b12" - -[[package]] -name = "sparse-merkle-tree" -version = 
"0.3.1-pre" -source = "git+https://github.com/heliaxdev/sparse-merkle-tree?branch=bat/arse-merkle-tree#04ad1eeb28901b57a7599bbe433b3822965dabe8" -dependencies = [ - "borsh", - "cfg-if 1.0.0", - "ics23", - "sha2 0.9.9", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "subtle-encoding" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dcb1ed7b8330c5eed5441052651dd7a12c75e2ed88f2ec024ae1fa3a5e59945" -dependencies = [ - "zeroize", -] - -[[package]] -name = "subtle-ng" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" - -[[package]] -name = "syn" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "unicode-xid", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "target-lexicon" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9bffcddbc2458fa3e6058414599e3c838a022abae82e5c67b4f7f80298d5bff" - -[[package]] -name = "tempfile" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" -dependencies = [ - "cfg-if 1.0.0", - "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", -] - -[[package]] -name = "tendermint" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "async-trait", - "bytes", - "ed25519", - "ed25519-dalek", - "flex-error", - "futures", - "num-traits", - "once_cell", - "prost", - "prost-types", - "serde", - "serde_bytes", - "serde_json", - "serde_repr", - "sha2 0.9.9", - "signature", - "subtle", - "subtle-encoding", - "tendermint-proto", - "time", - "zeroize", -] - -[[package]] -name = "tendermint-light-client-verifier" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "derive_more", - "flex-error", - "serde", - 
"tendermint", - "time", -] - -[[package]] -name = "tendermint-proto" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "bytes", - "flex-error", - "num-derive", - "num-traits", - "prost", - "prost-types", - "serde", - "serde_bytes", - "subtle-encoding", - "time", -] - -[[package]] -name = "tendermint-testgen" -version = "0.23.6" -source = "git+https://github.com/heliaxdev/tendermint-rs.git?rev=87be41b8c9cc2850830f4d8028c1fe1bd9f96284#87be41b8c9cc2850830f4d8028c1fe1bd9f96284" -dependencies = [ - "ed25519-dalek", - "gumdrop", - "serde", - "serde_json", - "simple-error", - "tempfile", - "tendermint", - "time", -] - -[[package]] -name = "test-log" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb78caec569a40f42c078c798c0e35b922d9054ec28e166f0d6ac447563d91a4" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thiserror" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thread_local" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" -dependencies = [ - "once_cell", -] - -[[package]] -name = "time" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "004cbc98f30fa233c61a38bc77e96a9106e65c88f2d3bef182ae952027e5753d" -dependencies = [ - "libc", - "num_threads", - "time-macros", -] - -[[package]] -name = "time-macros" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25eb0ca3468fc0acc11828786797f6ef9aa1555e4a211a60d64cc8e4d1be47d6" - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "toml" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" -dependencies = [ - "serde", -] - -[[package]] -name = "tonic-build" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" -dependencies = [ - "proc-macro2", - "prost-build", - "quote", - "syn", -] - -[[package]] -name = "tracing" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" -dependencies = [ - "cfg-if 1.0.0", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = 
"tracing-core" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" -dependencies = [ - "lazy_static", - "valuable", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" -dependencies = [ - "lazy_static", - "matchers", - "regex", - "sharded-slab", - "thread_local", - "tracing", - "tracing-core", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "ucd-trie" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" - -[[package]] -name = "uint" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - -[[package]] -name = "unicode-width" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "vp_template" -version = "0.7.1" -dependencies = [ - "borsh", - "getrandom", - "namada_tests", - "namada_vp_prelude", - "wee_alloc", -] - -[[package]] -name = "wait-timeout" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" -dependencies = [ - "libc", -] - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - -[[package]] -name = "wasm-bindgen" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" -dependencies = [ - "cfg-if 1.0.0", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" -dependencies = [ - "bumpalo", - "lazy_static", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.79" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" - -[[package]] -name = "wasmer" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc7dff846db3f38f8ed0be4a009fdfeb729cf1f94a2c7fb6ff2fec01cefa110" -dependencies = [ - "cfg-if 1.0.0", - "indexmap", - "js-sys", - "loupe", - "more-asserts", - "target-lexicon", - "thiserror", - "wasm-bindgen", - "wasmer-compiler", - "wasmer-compiler-cranelift", - "wasmer-derive", - "wasmer-engine", - "wasmer-engine-dylib", - "wasmer-engine-universal", - "wasmer-types", - "wasmer-vm", - "wat", - "winapi", -] - -[[package]] -name = "wasmer-cache" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "834a0de78bf30b9bce61c4c236344b9d8f2f4a3b7713f8de8a8274fbc2d4e9d5" -dependencies = [ - "blake3", - "hex", - "thiserror", - "wasmer", -] - -[[package]] -name = "wasmer-compiler" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c91abf22b16dad3826ec0d0e3ec0a8304262a6c7a14e16528c536131b80e63d" -dependencies = [ - "enumset", - "loupe", - "rkyv", - "serde", - "serde_bytes", - "smallvec", - "target-lexicon", - "thiserror", - "wasmer-types", - "wasmer-vm", - "wasmparser 0.78.2", -] - -[[package]] -name = "wasmer-compiler-cranelift" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7624a1f496b163139a7e0b442426cad805bec70486900287506f9d15a29323ab" -dependencies = [ - "cranelift-codegen", - "cranelift-entity", - "cranelift-frontend", - "gimli 0.25.0", - "loupe", - "more-asserts", - "rayon", - "smallvec", - "target-lexicon", - "tracing", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] -name = "wasmer-compiler-singlepass" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b63c1538ffb4b0e09edaebfcac35c34141d5944c52f77d137cbe0b634bd40fa" -dependencies = [ - "byteorder", - "dynasm", - "dynasmrt", - "lazy_static", - "loupe", - "more-asserts", - "rayon", - "smallvec", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] -name = "wasmer-derive" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "933b23b5cee0f58aa6c17c6de7e1f3007279357e0d555f22e24d6b395cfe7f89" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "wasmer-engine" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41db0ac4df90610cda8320cfd5abf90c6ec90e298b6fe5a09a81dff718b55640" -dependencies = [ - "backtrace", - "enumset", - "lazy_static", - "loupe", - "memmap2", - "more-asserts", - "rustc-demangle", - "serde", - "serde_bytes", - "target-lexicon", - "thiserror", - "wasmer-compiler", - "wasmer-types", - "wasmer-vm", -] - -[[package]] 
-name = "wasmer-engine-dylib" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "591683f3356ac31cc88aaecaf77ac2cc9f456014348b01af46c164f44f531162" -dependencies = [ - "cfg-if 1.0.0", - "enum-iterator", - "enumset", - "leb128", - "libloading", - "loupe", - "object 0.28.3", - "rkyv", - "serde", - "tempfile", - "tracing", - "wasmer-compiler", - "wasmer-engine", - "wasmer-object", - "wasmer-types", - "wasmer-vm", - "which", -] - -[[package]] -name = "wasmer-engine-universal" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dccfde103e9b87427099a6de344b7c791574f307d035c8c7dbbc00974c1af0c1" -dependencies = [ - "cfg-if 1.0.0", - "enum-iterator", - "enumset", - "leb128", - "loupe", - "region", - "rkyv", - "wasmer-compiler", - "wasmer-engine", - "wasmer-types", - "wasmer-vm", - "winapi", -] - -[[package]] -name = "wasmer-object" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d0c4005592998bd840f2289102ef9c67b6138338ed78e1fc0809586aa229040" -dependencies = [ - "object 0.28.3", - "thiserror", - "wasmer-compiler", - "wasmer-types", -] - -[[package]] -name = "wasmer-types" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4deb854f178265a76b59823c41547d259c65da3687b606b0b9c12d80ab950e3e" -dependencies = [ - "indexmap", - "loupe", - "rkyv", - "serde", - "thiserror", -] - -[[package]] -name = "wasmer-vm" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dbc5c989cb14a102433927e630473da52f83d82c469acd5cfa8fc7efacc1e70" -dependencies = [ - "backtrace", - "cc", - "cfg-if 1.0.0", - "enum-iterator", - "indexmap", - "libc", - "loupe", - "memoffset", - "more-asserts", - "region", - "rkyv", - "serde", - "thiserror", - "wasmer-types", - "winapi", -] - -[[package]] -name = "wasmparser" -version = "0.78.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52144d4c78e5cf8b055ceab8e5fa22814ce4315d6002ad32cfd914f37c12fd65" - -[[package]] -name = "wasmparser" -version = "0.83.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" - -[[package]] -name = "wast" -version = "39.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9bbbd53432b267421186feee3e52436531fa69a7cfee9403f5204352df3dd05" -dependencies = [ - "leb128", - "memchr", - "unicode-width", -] - -[[package]] -name = "wat" -version = "1.0.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab98ed25494f97c69f28758617f27c3e92e5336040b5c3a14634f2dd3fe61830" -dependencies = [ - "wast", -] - -[[package]] -name = "wee_alloc" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb3b5a6b2bb17cb6ad44a2e68a43e8d2722c997da10e928665c72ec6c0a0b8e" -dependencies = [ - "cfg-if 0.1.10", - "libc", - "memory_units", - "winapi", -] - -[[package]] -name = "which" -version = "4.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" -dependencies = [ - "either", - "lazy_static", - "libc", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - 
"winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "wyz" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b31594f29d27036c383b53b59ed3476874d518f0efb151b27a4c275141390e" -dependencies = [ - "tap", -] - -[[package]] -name = "zeroize" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] diff --git a/wasm/vp_template/Cargo.toml b/wasm/vp_template/Cargo.toml index 97ec78b64a..ca5ff03922 100644 --- a/wasm/vp_template/Cargo.toml +++ b/wasm/vp_template/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "vp_template" resolver = "2" -version = "0.7.1" +version = "0.8.1" [lib] crate-type = ["cdylib"] @@ -17,28 +17,3 @@ getrandom = { version = "0.2", features = ["custom"] } [dev-dependencies] namada_tests = {path = "../../tests"} - -[patch.crates-io] -# TODO temp patch for , and more tba. -borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -tendermint = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-testgen = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-light-client-verifier = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -ibc = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} -ibc-proto = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} - -[profile.release] -# smaller and faster wasm (https://rustwasm.github.io/book/reference/code-size.html#compiling-with-link-time-optimizations-lto) -lto = true -# simply terminate on panics, no unwinding -panic = "abort" -# tell llvm to optimize for size (https://rustwasm.github.io/book/reference/code-size.html#tell-llvm-to-optimize-for-size-instead-of-speed) -opt-level = 'z' diff --git 
a/wasm/vp_template/src/lib.rs b/wasm/vp_template/src/lib.rs index 7918072266..35cdabd1c5 100644 --- a/wasm/vp_template/src/lib.rs +++ b/wasm/vp_template/src/lib.rs @@ -2,25 +2,25 @@ use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec<u8>, addr: Address, keys_changed: BTreeSet<storage::Key>, verifiers: BTreeSet<Address>
, -) -> bool { +) -> VpResult { log_string(format!( "validate_tx called with addr: {}, key_changed: {:#?}, tx_data: \ {:#?}, verifiers: {:?}", addr, keys_changed, tx_data, verifiers )); - for key in keys_changed.iter() { - let key = key.to_string(); - let pre: Option<u64> = read_pre(&key); - let post: Option<u64> = read_post(&key); + for key in keys_changed { + let pre: Option<u64> = ctx.read_pre(&key)?; + let post: Option<u64> = ctx.read_post(&key)?; log_string(format!( "validate_tx key: {}, pre: {:#?}, post: {:#?}", key, pre, post, )); } - true + accept() } diff --git a/wasm/wasm_source/Cargo.toml b/wasm/wasm_source/Cargo.toml index ddbc38276b..5190e9e598 100644 --- a/wasm/wasm_source/Cargo.toml +++ b/wasm/wasm_source/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "namada_wasm" resolver = "2" -version = "0.7.1" +version = "0.8.1" [lib] crate-type = ["cdylib"] @@ -49,28 +49,3 @@ namada_vp_prelude = {path = "../../vp_prelude"} proptest = {git = "https://github.com/heliaxdev/proptest", branch = "tomas/sm"} tracing = "0.1.30" tracing-subscriber = {version = "0.3.7", default-features = false, features = ["env-filter", "fmt"]} - -[patch.crates-io] -# TODO temp patch for , and more tba. -borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} -borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -tendermint = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-testgen = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} -tendermint-light-client-verifier = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} - -# patched to a commit on the `eth-bridge-integration` branch of our fork -ibc = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} -ibc-proto = {git = "https://github.com/heliaxdev/ibc-rs.git", rev = "f4703dfe2c1f25cc431279ab74f10f3e0f6827e2"} - -[profile.release] -# smaller and faster wasm (https://rustwasm.github.io/book/reference/code-size.html#compiling-with-link-time-optimizations-lto) -lto = true -# simply terminate on panics, no unwinding -panic = "abort" -# tell llvm to optimize for size (https://rustwasm.github.io/book/reference/code-size.html#tell-llvm-to-optimize-for-size-instead-of-speed) -opt-level = 'z' diff --git a/wasm/wasm_source/Makefile b/wasm/wasm_source/Makefile index 43a30573d9..2ac254ed19 100644 --- a/wasm/wasm_source/Makefile +++ b/wasm/wasm_source/Makefile @@ -7,7 +7,6 @@ nightly := $(shell cat ../../rust-nightly-version) # Wasms can be added via the Cargo.toml `[features]` list. 
wasms := tx_bond wasms += tx_bridge_pool -wasms += tx_from_intent wasms += tx_ibc wasms += tx_init_account wasms += tx_init_nft @@ -24,9 +23,13 @@ wasms += vp_testnet_faucet wasms += vp_token wasms += vp_user -# Build all wasms +# Build all wasms in release mode all: $(wasms) +# Build all wasms in debug mode +debug: + $(foreach wasm,$(wasms),make debug_$(wasm) && ) true + # `cargo check` all wasms check: $(foreach wasm,$(wasms),make check_$(wasm) && ) true @@ -51,9 +54,14 @@ fmt-check: # Build a selected wasm # Linker flag "-s" for stripping (https://github.com/rust-lang/cargo/issues/3483#issuecomment-431209957) $(wasms): %: - RUSTFLAGS='-C link-arg=-s' $(cargo) build --release --target wasm32-unknown-unknown --features $@ && \ + RUSTFLAGS='-C link-arg=-s' $(cargo) build --release --target wasm32-unknown-unknown --target-dir 'target' --features $@ && \ cp "./target/wasm32-unknown-unknown/release/namada_wasm.wasm" ../$@.wasm +# Build a selected wasm in debug mode +$(patsubst %,debug_%,$(wasms)): debug_%: + RUSTFLAGS='-C link-arg=-s' $(cargo) build --target wasm32-unknown-unknown --target-dir 'target' --features $* && \ + cp "../target/wasm32-unknown-unknown/debug/anoma_wasm.wasm" ../$*.wasm + # `cargo check` one of the wasms, e.g. `make check_tx_transfer` $(patsubst %,check_%,$(wasms)): check_%: $(cargo) check --target wasm32-unknown-unknown --features $* @@ -79,4 +87,4 @@ clean: deps: $(rustup) target add wasm32-unknown-unknown -.PHONY : all check test clippy fmt fmt-check clean deps +.PHONY : all debug check test clippy fmt fmt-check clean deps diff --git a/wasm/wasm_source/README.md b/wasm/wasm_source/README.md index b3142156a4..423e29d034 100644 --- a/wasm/wasm_source/README.md +++ b/wasm/wasm_source/README.md @@ -13,7 +13,8 @@ make all # Each source that is included here can also be build and checked individually, e.g. for "tx_transfer" source: -make tx_transfer # build +make tx_transfer # optimized build (strips `debug_log!` statements) +make debug_tx_transfer # debug build make check_tx_transfer # cargo check make test_tx_transfer # cargo test make watch_tx_transfer # cargo watch diff --git a/wasm/wasm_source/proptest-regressions/tx_bond.txt b/wasm/wasm_source/proptest-regressions/tx_bond.txt new file mode 100644 index 0000000000..3a88756618 --- /dev/null +++ b/wasm/wasm_source/proptest-regressions/tx_bond.txt @@ -0,0 +1 @@ +cc e54347c5114ef29538127ba9ad68d1572af839ec63c015318fc0827818853a22 diff --git a/wasm/wasm_source/src/lib.rs b/wasm/wasm_source/src/lib.rs index 3a674ad97b..8929992754 100644 --- a/wasm/wasm_source/src/lib.rs +++ b/wasm/wasm_source/src/lib.rs @@ -1,7 +1,5 @@ #[cfg(feature = "tx_bond")] pub mod tx_bond; -#[cfg(feature = "tx_from_intent")] -pub mod tx_from_intent; #[cfg(feature = "tx_ibc")] pub mod tx_ibc; #[cfg(feature = "tx_init_account")] diff --git a/wasm/wasm_source/src/tx_bond.rs b/wasm/wasm_source/src/tx_bond.rs index 9a5309f927..18a96ad60d 100644 --- a/wasm/wasm_source/src/tx_bond.rs +++ b/wasm/wasm_source/src/tx_bond.rs @@ -1,19 +1,361 @@ //! A tx for a PoS bond that stakes tokens via a self-bond or delegation. 
-use namada_tx_prelude::proof_of_stake::bond_tokens; use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let bond = - transaction::pos::Bond::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); - - if let Err(err) = - bond_tokens(bond.source.as_ref(), &bond.validator, bond.amount) - { - debug_log!("Bond failed with: {}", err); - panic!() +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let bond = transaction::pos::Bond::try_from_slice(&data[..]) + .wrap_err("failed to decode Bond")?; + + ctx.bond_tokens(bond.source.as_ref(), &bond.validator, bond.amount) +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use namada::ledger::pos::PosParams; + use namada::proto::Tx; + use namada::types::storage::Epoch; + use namada_tests::log::test; + use namada_tests::native_vp::pos::init_pos; + use namada_tests::native_vp::TestNativeVpEnv; + use namada_tests::tx::*; + use namada_tx_prelude::address::testing::{ + arb_established_address, arb_non_internal_address, + }; + use namada_tx_prelude::address::InternalAddress; + use namada_tx_prelude::key::testing::arb_common_keypair; + use namada_tx_prelude::key::RefTo; + use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params; + use namada_tx_prelude::token; + use namada_vp_prelude::proof_of_stake::types::{ + Bond, VotingPower, VotingPowerDelta, + }; + use namada_vp_prelude::proof_of_stake::{ + staking_token_address, BondId, GenesisValidator, PosVP, + }; + use proptest::prelude::*; + + use super::*; + + proptest! { + /// In this test we setup the ledger and PoS system with an arbitrary + /// initial state with 1 genesis validator and arbitrary PoS parameters. We then + /// generate an arbitrary bond that we'd like to apply. + /// + /// After we apply the bond, we check that all the storage values + /// in PoS system have been updated as expected and then we also check + /// that this transaction is accepted by the PoS validity predicate. 
+ #[test] + fn test_tx_bond( + (initial_stake, bond) in arb_initial_stake_and_bond(), + // A key to sign the transaction + key in arb_common_keypair(), + pos_params in arb_pos_params()) { + test_tx_bond_aux(initial_stake, bond, key, pos_params).unwrap() + } + } + + fn test_tx_bond_aux( + initial_stake: token::Amount, + bond: transaction::pos::Bond, + key: key::common::SecretKey, + pos_params: PosParams, + ) -> TxResult { + let is_delegation = matches!( + &bond.source, Some(source) if *source != bond.validator); + let staking_reward_address = address::testing::established_address_1(); + let consensus_key = key::testing::keypair_1().ref_to(); + let staking_reward_key = key::testing::keypair_2().ref_to(); + let eth_cold_key = key::testing::keypair_3().ref_to(); + let eth_hot_key = key::testing::keypair_4().ref_to(); + + let genesis_validators = [GenesisValidator { + address: bond.validator.clone(), + staking_reward_address, + tokens: initial_stake, + consensus_key, + staking_reward_key, + eth_cold_key, + eth_hot_key, + }]; + + init_pos(&genesis_validators[..], &pos_params, Epoch(0)); + + tx_host_env::with(|tx_env| { + if let Some(source) = &bond.source { + tx_env.spawn_accounts([source]); + } + + // Ensure that the bond's source has enough tokens for the bond + let target = bond.source.as_ref().unwrap_or(&bond.validator); + tx_env.credit_tokens(target, &staking_token_address(), bond.amount); + }); + + let tx_code = vec![]; + let tx_data = bond.try_to_vec().unwrap(); + let tx = Tx::new(tx_code, Some(tx_data)); + let signed_tx = tx.sign(&key); + let tx_data = signed_tx.data.unwrap(); + + // Read the data before the tx is executed + let pos_balance_key = token::balance_key( + &staking_token_address(), + &Address::Internal(InternalAddress::PoS), + ); + let pos_balance_pre: token::Amount = ctx() + .read(&pos_balance_key)? 
+ .expect("PoS must have balance"); + assert_eq!(pos_balance_pre, initial_stake); + let total_voting_powers_pre = ctx().read_total_voting_power()?; + let validator_sets_pre = ctx().read_validator_set()?; + let validator_voting_powers_pre = + ctx().read_validator_voting_power(&bond.validator)?.unwrap(); + + apply_tx(ctx(), tx_data)?; + + // Read the data after the tx is executed + + // The following storage keys should be updated: + + // - `#{PoS}/validator/#{validator}/total_deltas` + let total_delta_post = + ctx().read_validator_total_deltas(&bond.validator)?; + for epoch in 0..pos_params.pipeline_len { + assert_eq!( + total_delta_post.as_ref().unwrap().get(epoch), + Some(initial_stake.into()), + "The total deltas before the pipeline offset must not change \ + - checking in epoch: {epoch}" + ); + } + for epoch in pos_params.pipeline_len..=pos_params.unbonding_len { + let expected_stake = + i128::from(initial_stake) + i128::from(bond.amount); + assert_eq!( + total_delta_post.as_ref().unwrap().get(epoch), + Some(expected_stake), + "The total deltas at and after the pipeline offset epoch must \ + be incremented by the bonded amount - checking in epoch: \ + {epoch}" + ); + } + + // - `#{staking_token}/balance/#{PoS}` + let pos_balance_post: token::Amount = + ctx().read(&pos_balance_key)?.unwrap(); + assert_eq!(pos_balance_pre + bond.amount, pos_balance_post); + + // - `#{PoS}/bond/#{owner}/#{validator}` + let bond_src = bond + .source + .clone() + .unwrap_or_else(|| bond.validator.clone()); + let bond_id = BondId { + validator: bond.validator.clone(), + source: bond_src, + }; + let bonds_post = ctx().read_bond(&bond_id)?.unwrap(); + + if is_delegation { + // A delegation is applied at pipeline offset + for epoch in 0..pos_params.pipeline_len { + let bond: Option> = bonds_post.get(epoch); + assert!( + bond.is_none(), + "Delegation before pipeline offset should be empty - \ + checking epoch {epoch}, got {bond:#?}" + ); + } + for epoch in pos_params.pipeline_len..=pos_params.unbonding_len { + let start_epoch = + namada_tx_prelude::proof_of_stake::types::Epoch::from( + pos_params.pipeline_len, + ); + let expected_bond = + HashMap::from_iter([(start_epoch, bond.amount)]); + let bond: Bond = bonds_post.get(epoch).unwrap(); + assert_eq!( + bond.pos_deltas, expected_bond, + "Delegation at and after pipeline offset should be equal \ + to the bonded amount - checking epoch {epoch}" + ); + } + } else { + let genesis_epoch = + namada_tx_prelude::proof_of_stake::types::Epoch::from(0); + // It was a self-bond + for epoch in 0..pos_params.pipeline_len { + let expected_bond = + HashMap::from_iter([(genesis_epoch, initial_stake)]); + let bond: Bond = bonds_post + .get(epoch) + .expect("Genesis validator should already have self-bond"); + assert_eq!( + bond.pos_deltas, expected_bond, + "Self-bond before pipeline offset should be equal to the \ + genesis initial stake - checking epoch {epoch}" + ); + } + for epoch in pos_params.pipeline_len..=pos_params.unbonding_len { + let start_epoch = + namada_tx_prelude::proof_of_stake::types::Epoch::from( + pos_params.pipeline_len, + ); + let expected_bond = HashMap::from_iter([ + (genesis_epoch, initial_stake), + (start_epoch, bond.amount), + ]); + let bond: Bond = bonds_post.get(epoch).unwrap(); + assert_eq!( + bond.pos_deltas, expected_bond, + "Self-bond at and after pipeline offset should contain \ + genesis stake and the bonded amount - checking epoch \ + {epoch}" + ); + } + } + + // If the voting power from validator's initial stake is different + // from the voting 
power after the bond is applied, we expect the + // following 3 fields to be updated: + // - `#{PoS}/total_voting_power` (optional) + // - `#{PoS}/validator_set` (optional) + // - `#{PoS}/validator/#{validator}/voting_power` (optional) + let total_voting_powers_post = ctx().read_total_voting_power()?; + let validator_sets_post = ctx().read_validator_set()?; + let validator_voting_powers_post = + ctx().read_validator_voting_power(&bond.validator)?.unwrap(); + + let voting_power_pre = + VotingPower::from_tokens(initial_stake, &pos_params); + let voting_power_post = + VotingPower::from_tokens(initial_stake + bond.amount, &pos_params); + if voting_power_pre == voting_power_post { + // None of the optional storage fields should have been updated + assert_eq!(total_voting_powers_pre, total_voting_powers_post); + assert_eq!(validator_sets_pre, validator_sets_post); + assert_eq!( + validator_voting_powers_pre, + validator_voting_powers_post + ); + } else { + for epoch in 0..pos_params.pipeline_len { + let total_voting_power_pre = total_voting_powers_pre.get(epoch); + let total_voting_power_post = + total_voting_powers_post.get(epoch); + assert_eq!( + total_voting_power_pre, total_voting_power_post, + "Total voting power before pipeline offset must not \ + change - checking epoch {epoch}" + ); + + let validator_set_pre = validator_sets_pre.get(epoch); + let validator_set_post = validator_sets_post.get(epoch); + assert_eq!( + validator_set_pre, validator_set_post, + "Validator set before pipeline offset must not change - \ + checking epoch {epoch}" + ); + + let validator_voting_power_pre = + validator_voting_powers_pre.get(epoch); + let validator_voting_power_post = + validator_voting_powers_post.get(epoch); + assert_eq!( + validator_voting_power_pre, validator_voting_power_post, + "Validator's voting power before pipeline offset must not \ + change - checking epoch {epoch}" + ); + } + for epoch in pos_params.pipeline_len..=pos_params.unbonding_len { + let total_voting_power_pre = + total_voting_powers_pre.get(epoch).unwrap(); + let total_voting_power_post = + total_voting_powers_post.get(epoch).unwrap(); + assert_ne!( + total_voting_power_pre, total_voting_power_post, + "Total voting power at and after pipeline offset must \ + have changed - checking epoch {epoch}" + ); + + let validator_set_pre = validator_sets_pre.get(epoch).unwrap(); + let validator_set_post = + validator_sets_post.get(epoch).unwrap(); + assert_ne!( + validator_set_pre, validator_set_post, + "Validator set at and after pipeline offset must have \ + changed - checking epoch {epoch}" + ); + + let validator_voting_power_pre = + validator_voting_powers_pre.get(epoch).unwrap(); + let validator_voting_power_post = + validator_voting_powers_post.get(epoch).unwrap(); + assert_ne!( + validator_voting_power_pre, validator_voting_power_post, + "Validator's voting power at and after pipeline offset \ + must have changed - checking epoch {epoch}" + ); + + // Expected voting power from the model ... + let expected_validator_voting_power: VotingPowerDelta = + voting_power_post.try_into().unwrap(); + // ... 
must match the voting power read from storage + assert_eq!( + validator_voting_power_post, + expected_validator_voting_power + ); + } + } + + // Use the tx_env to run PoS VP + let tx_env = tx_host_env::take(); + let vp_env = TestNativeVpEnv::from_tx_env(tx_env, address::POS); + let result = vp_env.validate_tx(PosVP::new); + let result = + result.expect("Validation of valid changes must not fail!"); + assert!( + result, + "PoS Validity predicate must accept this transaction" + ); + Ok(()) + } + + prop_compose! { + /// Generates an initial validator stake and a bond, while making sure + /// that the `initial_stake + bond.amount <= u64::MAX` to avoid + /// overflow. + fn arb_initial_stake_and_bond() + // Generate initial stake + (initial_stake in token::testing::arb_amount()) + // Use the initial stake to limit the bond amount + (bond in arb_bond(u64::MAX - u64::from(initial_stake)), + // Use the generated initial stake too + initial_stake in Just(initial_stake), + ) -> (token::Amount, transaction::pos::Bond) { + (initial_stake, bond) + } + } + + fn arb_bond( + max_amount: u64, + ) -> impl Strategy { + ( + arb_established_address(), + prop::option::of(arb_non_internal_address()), + token::testing::arb_amount_ceiled(max_amount), + ) + .prop_map(|(validator, source, amount)| { + transaction::pos::Bond { + validator: Address::Established(validator), + amount, + source, + } + }) } } diff --git a/wasm/wasm_source/src/tx_from_intent.rs b/wasm/wasm_source/src/tx_from_intent.rs deleted file mode 100644 index f86c412353..0000000000 --- a/wasm/wasm_source/src/tx_from_intent.rs +++ /dev/null @@ -1,36 +0,0 @@ -//! A tx for a token transfer crafted by matchmaker from intents. -//! This tx uses `intent::IntentTransfers` wrapped inside -//! `SignedTxData` as its input as declared in `shared` crate. 
- -use namada_tx_prelude::*; - -#[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - - let tx_data = - intent::IntentTransfers::try_from_slice(&signed.data.unwrap()[..]); - - let tx_data = tx_data.unwrap(); - - // make sure that the matchmaker has to validate this tx - insert_verifier(&tx_data.source); - - for token::Transfer { - source, - target, - token, - sub_prefix, - amount, - } in tx_data.matches.transfers - { - token::transfer(&source, &target, &token, sub_prefix, amount); - } - - tx_data - .matches - .exchanges - .values() - .into_iter() - .for_each(intent::invalidate_exchange); -} diff --git a/wasm/wasm_source/src/tx_ibc.rs b/wasm/wasm_source/src/tx_ibc.rs index e38aa2f856..79cbc6cf96 100644 --- a/wasm/wasm_source/src/tx_ibc.rs +++ b/wasm/wasm_source/src/tx_ibc.rs @@ -6,7 +6,9 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - Ibc.dispatch(&signed.data.unwrap()).unwrap() +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + ctx.dispatch_ibc_action(&data) } diff --git a/wasm/wasm_source/src/tx_init_account.rs b/wasm/wasm_source/src/tx_init_account.rs index e976c38941..e0fe700d63 100644 --- a/wasm/wasm_source/src/tx_init_account.rs +++ b/wasm/wasm_source/src/tx_init_account.rs @@ -4,14 +4,16 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let tx_data = - transaction::InitAccount::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let tx_data = transaction::InitAccount::try_from_slice(&data[..]) + .wrap_err("failed to decode InitAccount")?; debug_log!("apply_tx called to init a new established account"); - let address = init_account(&tx_data.vp_code); + let address = ctx.init_account(&tx_data.vp_code)?; let pk_key = key::pk_key(&address); - write(&pk_key.to_string(), &tx_data.public_key); + ctx.write(&pk_key, &tx_data.public_key)?; + Ok(()) } diff --git a/wasm/wasm_source/src/tx_init_nft.rs b/wasm/wasm_source/src/tx_init_nft.rs index e26d656b57..de67dfbb53 100644 --- a/wasm/wasm_source/src/tx_init_nft.rs +++ b/wasm/wasm_source/src/tx_init_nft.rs @@ -3,12 +3,14 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let tx_data = - transaction::nft::CreateNft::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let tx_data = transaction::nft::CreateNft::try_from_slice(&data[..]) + .wrap_err("failed to decode CreateNft")?; log_string("apply_tx called to create a new NFT"); - nft::init_nft(tx_data); + let _address = nft::init_nft(ctx, tx_data)?; + Ok(()) } diff --git a/wasm/wasm_source/src/tx_init_proposal.rs b/wasm/wasm_source/src/tx_init_proposal.rs index 3cb1c3d5de..cb7fe9ffbb 100644 --- a/wasm/wasm_source/src/tx_init_proposal.rs +++ 
b/wasm/wasm_source/src/tx_init_proposal.rs @@ -3,13 +3,14 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let tx_data = transaction::governance::InitProposalData::try_from_slice( - &signed.data.unwrap()[..], - ) - .unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let tx_data = + transaction::governance::InitProposalData::try_from_slice(&data[..]) + .wrap_err("failed to decode InitProposalData")?; log_string("apply_tx called to create a new governance proposal"); - governance::init_proposal(tx_data); + governance::init_proposal(ctx, tx_data) } diff --git a/wasm/wasm_source/src/tx_init_validator.rs b/wasm/wasm_source/src/tx_init_validator.rs index 79dfedad56..3f6c6b6bb4 100644 --- a/wasm/wasm_source/src/tx_init_validator.rs +++ b/wasm/wasm_source/src/tx_init_validator.rs @@ -5,14 +5,17 @@ use namada_tx_prelude::transaction::InitValidator; use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let init_validator = - InitValidator::try_from_slice(&signed.data.unwrap()[..]).unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let init_validator = InitValidator::try_from_slice(&data[..]) + .wrap_err("failed to decode InitValidator")?; + log_string(&format!("InitValidator: {:#?}", init_validator)); debug_log!("apply_tx called to init a new validator account"); // Register the validator in PoS - match proof_of_stake::init_validator(init_validator) { + match ctx.init_validator(init_validator) { Ok((validator_address, staking_reward_address)) => { debug_log!( "Created validator {} and staking reward account {}", @@ -25,4 +28,5 @@ fn apply_tx(tx_data: Vec) { panic!() } } + Ok(()) } diff --git a/wasm/wasm_source/src/tx_mint_nft.rs b/wasm/wasm_source/src/tx_mint_nft.rs index 692155432c..d3ab17e7ad 100644 --- a/wasm/wasm_source/src/tx_mint_nft.rs +++ b/wasm/wasm_source/src/tx_mint_nft.rs @@ -3,12 +3,13 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let tx_data = - transaction::nft::MintNft::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let tx_data = transaction::nft::MintNft::try_from_slice(&data[..]) + .wrap_err("failed to decode MintNft")?; log_string("apply_tx called to mint a new NFT tokens"); - nft::mint_tokens(tx_data); + nft::mint_tokens(ctx, tx_data) } diff --git a/wasm/wasm_source/src/tx_transfer.rs b/wasm/wasm_source/src/tx_transfer.rs index 059a3296e1..4a3475128d 100644 --- a/wasm/wasm_source/src/tx_transfer.rs +++ b/wasm/wasm_source/src/tx_transfer.rs @@ -5,10 +5,12 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let transfer = - token::Transfer::try_from_slice(&signed.data.unwrap()[..]).unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { 
+ let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let transfer = token::Transfer::try_from_slice(&data[..]) + .wrap_err("failed to decode token::Transfer")?; debug_log!("apply_tx called with transfer: {:#?}", transfer); let token::Transfer { source, @@ -17,5 +19,5 @@ fn apply_tx(tx_data: Vec) { sub_prefix, amount, } = transfer; - token::transfer(&source, &target, &token, sub_prefix, amount) + token::transfer(ctx, &source, &target, &token, sub_prefix, amount) } diff --git a/wasm/wasm_source/src/tx_unbond.rs b/wasm/wasm_source/src/tx_unbond.rs index 5d2662ed5c..851329b908 100644 --- a/wasm/wasm_source/src/tx_unbond.rs +++ b/wasm/wasm_source/src/tx_unbond.rs @@ -1,20 +1,419 @@ //! A tx for a PoS unbond that removes staked tokens from a self-bond or a //! delegation to be withdrawn in or after unbonding epoch. -use namada_tx_prelude::proof_of_stake::unbond_tokens; use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let unbond = - transaction::pos::Unbond::try_from_slice(&signed.data.unwrap()[..]) +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let unbond = transaction::pos::Unbond::try_from_slice(&data[..]) + .wrap_err("failed to decode Unbond")?; + + ctx.unbond_tokens(unbond.source.as_ref(), &unbond.validator, unbond.amount) +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use namada::ledger::pos::PosParams; + use namada::proto::Tx; + use namada::types::storage::Epoch; + use namada_tests::log::test; + use namada_tests::native_vp::pos::init_pos; + use namada_tests::native_vp::TestNativeVpEnv; + use namada_tests::tx::*; + use namada_tx_prelude::address::InternalAddress; + use namada_tx_prelude::key::testing::arb_common_keypair; + use namada_tx_prelude::key::RefTo; + use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params; + use namada_tx_prelude::token; + use namada_vp_prelude::proof_of_stake::types::{ + Bond, Unbond, VotingPower, VotingPowerDelta, + }; + use namada_vp_prelude::proof_of_stake::{ + staking_token_address, BondId, GenesisValidator, PosVP, + }; + use proptest::prelude::*; + + use super::*; + + proptest! { + /// In this test we setup the ledger and PoS system with an arbitrary + /// initial state with 1 genesis validator, a delegation bond if the + /// unbond is for a delegation, arbitrary PoS parameters, and + /// we generate an arbitrary unbond that we'd like to apply. + /// + /// After we apply the unbond, we check that all the storage values + /// in PoS system have been updated as expected and then we also check + /// that this transaction is accepted by the PoS validity predicate. 
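// --- Illustrative sketch, not part of this change ---
// The doc comment above says the test checks that the PoS storage values
// were "updated as expected". Concretely, the assertions further down
// expect the validator's `total_deltas` to evolve per epoch roughly as
// summarized here; the helper name and signature are hypothetical and
// only restate what the test asserts.
fn expected_total_delta(
    epoch: u64,
    pipeline_len: u64,
    unbonding_len: u64,
    is_delegation: bool,
    initial_stake: i128,
    unbond_amount: i128,
) -> Option<i128> {
    if epoch < pipeline_len {
        // Before the pipeline offset only a genesis self-bond can exist
        Some(if is_delegation { 0 } else { initial_stake })
    } else if epoch < unbonding_len {
        // Delegation and self-bond are both initialized to `initial_stake`
        Some(initial_stake)
    } else if epoch > unbonding_len {
        // After the unbonding offset the unbonded amount is deducted
        Some(initial_stake - unbond_amount)
    } else {
        // The test does not assert on the unbonding offset epoch itself
        None
    }
}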
+ #[test] + fn test_tx_unbond( + (initial_stake, unbond) in arb_initial_stake_and_unbond(), + // A key to sign the transaction + key in arb_common_keypair(), + pos_params in arb_pos_params()) { + test_tx_unbond_aux(initial_stake, unbond, key, pos_params).unwrap() + } + } + + fn test_tx_unbond_aux( + initial_stake: token::Amount, + unbond: transaction::pos::Unbond, + key: key::common::SecretKey, + pos_params: PosParams, + ) -> TxResult { + let is_delegation = matches!( + &unbond.source, Some(source) if *source != unbond.validator); + let staking_reward_address = address::testing::established_address_1(); + let consensus_key = key::testing::keypair_1().ref_to(); + let staking_reward_key = key::testing::keypair_2().ref_to(); + let eth_cold_key = key::testing::keypair_3().ref_to(); + let eth_hot_key = key::testing::keypair_4().ref_to(); + + let genesis_validators = [GenesisValidator { + address: unbond.validator.clone(), + staking_reward_address, + tokens: if is_delegation { + // If we're unbonding a delegation, we'll give the initial stake + // to the delegation instead of the validator + token::Amount::default() + } else { + initial_stake + }, + consensus_key, + staking_reward_key, + eth_cold_key, + eth_hot_key, + }]; + + init_pos(&genesis_validators[..], &pos_params, Epoch(0)); + + tx_host_env::with(|tx_env| { + if is_delegation { + let source = unbond.source.as_ref().unwrap(); + tx_env.spawn_accounts([source]); + + // To allow to unbond delegation, there must be a delegation + // bond first. + // First, credit the bond's source with the initial stake, + // before we initialize the bond below + tx_env.credit_tokens( + source, + &staking_token_address(), + initial_stake, + ); + } + }); + + if is_delegation { + // Initialize the delegation - unlike genesis validator's self-bond, + // this happens at pipeline offset + ctx().bond_tokens( + unbond.source.as_ref(), + &unbond.validator, + initial_stake, + )?; + } + tx_host_env::commit_tx_and_block(); + + let tx_code = vec![]; + let tx_data = unbond.try_to_vec().unwrap(); + let tx = Tx::new(tx_code, Some(tx_data)); + let signed_tx = tx.sign(&key); + let tx_data = signed_tx.data.unwrap(); + + let unbond_src = unbond + .source + .clone() + .unwrap_or_else(|| unbond.validator.clone()); + let unbond_id = BondId { + validator: unbond.validator.clone(), + source: unbond_src, + }; + + let pos_balance_key = token::balance_key( + &staking_token_address(), + &Address::Internal(InternalAddress::PoS), + ); + let pos_balance_pre: token::Amount = ctx() + .read(&pos_balance_key)? + .expect("PoS must have balance"); + assert_eq!(pos_balance_pre, initial_stake); + let total_voting_powers_pre = ctx().read_total_voting_power()?; + let validator_sets_pre = ctx().read_validator_set()?; + let validator_voting_powers_pre = ctx() + .read_validator_voting_power(&unbond.validator)? + .unwrap(); + let bonds_pre = ctx().read_bond(&unbond_id)?.unwrap(); + dbg!(&bonds_pre); + + apply_tx(ctx(), tx_data)?; + + // Read the data after the tx is executed + + // The following storage keys should be updated: + + // - `#{PoS}/validator/#{validator}/total_deltas` + let total_delta_post = + ctx().read_validator_total_deltas(&unbond.validator)?; + + let expected_deltas_at_pipeline = if is_delegation { + // When this is a delegation, there will be no bond until pipeline + 0.into() + } else { + // Before pipeline offset, there can only be self-bond + initial_stake + }; + + // Before pipeline offset, there can only be self-bond for genesis + // validator. 
In case of a delegation the state is setup so that there + // is no bond until pipeline offset. + for epoch in 0..pos_params.pipeline_len { + assert_eq!( + total_delta_post.as_ref().unwrap().get(epoch), + Some(expected_deltas_at_pipeline.into()), + "The total deltas before the pipeline offset must not change \ + - checking in epoch: {epoch}" + ); + } + + // At and after pipeline offset, there can be either delegation or + // self-bond, both of which are initialized to the same `initial_stake` + for epoch in pos_params.pipeline_len..pos_params.unbonding_len { + assert_eq!( + total_delta_post.as_ref().unwrap().get(epoch), + Some(initial_stake.into()), + "The total deltas before the unbonding offset must not change \ + - checking in epoch: {epoch}" + ); + } + + { + let epoch = pos_params.unbonding_len + 1; + let expected_stake = + i128::from(initial_stake) - i128::from(unbond.amount); + assert_eq!( + total_delta_post.as_ref().unwrap().get(epoch), + Some(expected_stake), + "The total deltas after the unbonding offset epoch must be \ + decremented by the unbonded amount - checking in epoch: \ + {epoch}" + ); + } + + // - `#{staking_token}/balance/#{PoS}` + let pos_balance_post: token::Amount = + ctx().read(&pos_balance_key)?.unwrap(); + assert_eq!( + pos_balance_pre, pos_balance_post, + "Unbonding doesn't affect PoS system balance" + ); + + // - `#{PoS}/unbond/#{owner}/#{validator}` + let unbonds_post = ctx().read_unbond(&unbond_id)?.unwrap(); + let bonds_post = ctx().read_bond(&unbond_id)?.unwrap(); + for epoch in 0..pos_params.unbonding_len { + let unbond: Option> = unbonds_post.get(epoch); + + assert!( + unbond.is_none(), + "There should be no unbond until unbonding offset - checking \ + epoch {epoch}" + ); + } + let start_epoch = match &unbond.source { + Some(_) => { + // This bond was a delegation + namada_tx_prelude::proof_of_stake::types::Epoch::from( + pos_params.pipeline_len, + ) + } + None => { + // This bond was a genesis validator self-bond + namada_tx_prelude::proof_of_stake::types::Epoch::default() + } + }; + let end_epoch = namada_tx_prelude::proof_of_stake::types::Epoch::from( + pos_params.unbonding_len - 1, + ); + + let expected_unbond = + HashMap::from_iter([((start_epoch, end_epoch), unbond.amount)]); + let actual_unbond: Unbond = + unbonds_post.get(pos_params.unbonding_len).unwrap(); + assert_eq!( + actual_unbond.deltas, expected_unbond, + "Delegation at unbonding offset should be equal to the unbonded \ + amount" + ); + + for epoch in pos_params.pipeline_len..pos_params.unbonding_len { + let bond: Bond = bonds_post.get(epoch).unwrap(); + let expected_bond = + HashMap::from_iter([(start_epoch, initial_stake)]); + assert_eq!( + bond.pos_deltas, expected_bond, + "Before unbonding offset, the bond should be untouched, \ + checking epoch {epoch}" + ); + } + { + let epoch = pos_params.unbonding_len + 1; + let bond: Bond = bonds_post.get(epoch).unwrap(); + let expected_bond = + HashMap::from_iter([(start_epoch, initial_stake)]); + assert_eq!( + bond.pos_deltas, expected_bond, + "At unbonding offset, the pos deltas should not change, \ + checking epoch {epoch}" + ); + assert_eq!( + bond.neg_deltas, unbond.amount, + "At unbonding offset, the unbonded amount should have been \ + deducted, checking epoch {epoch}" + ) + } + // If the voting power from validator's initial stake is different + // from the voting power after the bond is applied, we expect the + // following 3 fields to be updated: + // - `#{PoS}/total_voting_power` (optional) + // - `#{PoS}/validator_set` (optional) + // 
- `#{PoS}/validator/#{validator}/voting_power` (optional) + let total_voting_powers_post = ctx().read_total_voting_power()?; + let validator_sets_post = ctx().read_validator_set()?; + let validator_voting_powers_post = ctx() + .read_validator_voting_power(&unbond.validator)? .unwrap(); - if let Err(err) = - unbond_tokens(unbond.source.as_ref(), &unbond.validator, unbond.amount) - { - debug_log!("Unbonding failed with: {}", err); - panic!() + let voting_power_pre = + VotingPower::from_tokens(initial_stake, &pos_params); + let voting_power_post = VotingPower::from_tokens( + initial_stake - unbond.amount, + &pos_params, + ); + if voting_power_pre == voting_power_post { + // None of the optional storage fields should have been updated + assert_eq!(total_voting_powers_pre, total_voting_powers_post); + assert_eq!(validator_sets_pre, validator_sets_post); + assert_eq!( + validator_voting_powers_pre, + validator_voting_powers_post + ); + } else { + for epoch in 0..pos_params.unbonding_len { + let total_voting_power_pre = total_voting_powers_pre.get(epoch); + let total_voting_power_post = + total_voting_powers_post.get(epoch); + assert_eq!( + total_voting_power_pre, total_voting_power_post, + "Total voting power before pipeline offset must not \ + change - checking epoch {epoch}" + ); + + let validator_set_pre = validator_sets_pre.get(epoch); + let validator_set_post = validator_sets_post.get(epoch); + assert_eq!( + validator_set_pre, validator_set_post, + "Validator set before pipeline offset must not change - \ + checking epoch {epoch}" + ); + + let validator_voting_power_pre = + validator_voting_powers_pre.get(epoch); + let validator_voting_power_post = + validator_voting_powers_post.get(epoch); + assert_eq!( + validator_voting_power_pre, validator_voting_power_post, + "Validator's voting power before pipeline offset must not \ + change - checking epoch {epoch}" + ); + } + { + let epoch = pos_params.unbonding_len; + let total_voting_power_pre = + total_voting_powers_pre.get(epoch).unwrap(); + let total_voting_power_post = + total_voting_powers_post.get(epoch).unwrap(); + assert_ne!( + total_voting_power_pre, total_voting_power_post, + "Total voting power at and after pipeline offset must \ + have changed - checking epoch {epoch}" + ); + + let validator_set_pre = validator_sets_pre.get(epoch).unwrap(); + let validator_set_post = + validator_sets_post.get(epoch).unwrap(); + assert_ne!( + validator_set_pre, validator_set_post, + "Validator set at and after pipeline offset must have \ + changed - checking epoch {epoch}" + ); + + let validator_voting_power_pre = + validator_voting_powers_pre.get(epoch).unwrap(); + let validator_voting_power_post = + validator_voting_powers_post.get(epoch).unwrap(); + assert_ne!( + validator_voting_power_pre, validator_voting_power_post, + "Validator's voting power at and after pipeline offset \ + must have changed - checking epoch {epoch}" + ); + + // Expected voting power from the model ... + let expected_validator_voting_power: VotingPowerDelta = + voting_power_post.try_into().unwrap(); + // ... 
must match the voting power read from storage + assert_eq!( + validator_voting_power_post, + expected_validator_voting_power + ); + } + } + + // Use the tx_env to run PoS VP + let tx_env = tx_host_env::take(); + let vp_env = TestNativeVpEnv::from_tx_env(tx_env, address::POS); + let result = vp_env.validate_tx(PosVP::new); + let result = + result.expect("Validation of valid changes must not fail!"); + assert!( + result, + "PoS Validity predicate must accept this transaction" + ); + Ok(()) + } + + fn arb_initial_stake_and_unbond() + -> impl Strategy { + // Generate initial stake + token::testing::arb_amount().prop_flat_map(|initial_stake| { + // Use the initial stake to limit the bond amount + let unbond = arb_unbond(u64::from(initial_stake)); + // Use the generated initial stake too too + (Just(initial_stake), unbond) + }) + } + + /// Generates an initial validator stake and a unbond, while making sure + /// that the `initial_stake >= unbond.amount`. + fn arb_unbond( + max_amount: u64, + ) -> impl Strategy { + ( + address::testing::arb_established_address(), + prop::option::of(address::testing::arb_non_internal_address()), + token::testing::arb_amount_ceiled(max_amount), + ) + .prop_map(|(validator, source, amount)| { + let validator = Address::Established(validator); + transaction::pos::Unbond { + validator, + amount, + source, + } + }) } } diff --git a/wasm/wasm_source/src/tx_update_vp.rs b/wasm/wasm_source/src/tx_update_vp.rs index 4b68f11170..0bb819f026 100644 --- a/wasm/wasm_source/src/tx_update_vp.rs +++ b/wasm/wasm_source/src/tx_update_vp.rs @@ -5,11 +5,14 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let update_vp = - transaction::UpdateVp::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let update_vp = transaction::UpdateVp::try_from_slice(&data[..]) + .wrap_err("failed to decode UpdateVp")?; + debug_log!("update VP for: {:#?}", update_vp.addr); - update_validity_predicate(&update_vp.addr, update_vp.vp_code) + + ctx.update_validity_predicate(&update_vp.addr, update_vp.vp_code) } diff --git a/wasm/wasm_source/src/tx_vote_proposal.rs b/wasm/wasm_source/src/tx_vote_proposal.rs index cae8c4ef33..92c7af4c7f 100644 --- a/wasm/wasm_source/src/tx_vote_proposal.rs +++ b/wasm/wasm_source/src/tx_vote_proposal.rs @@ -3,13 +3,15 @@ use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let tx_data = transaction::governance::VoteProposalData::try_from_slice( - &signed.data.unwrap()[..], - ) - .unwrap(); - log_string("apply_tx called to vote a governance proposal"); - - governance::vote_proposal(tx_data); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let tx_data = + transaction::governance::VoteProposalData::try_from_slice(&data[..]) + .wrap_err("failed to decode VoteProposalData")?; + + debug_log!("apply_tx called to vote a governance proposal"); + + governance::vote_proposal(ctx, tx_data) } diff --git a/wasm/wasm_source/src/tx_withdraw.rs b/wasm/wasm_source/src/tx_withdraw.rs index 27bd984a66..675b079609 100644 --- 
a/wasm/wasm_source/src/tx_withdraw.rs +++ b/wasm/wasm_source/src/tx_withdraw.rs @@ -1,23 +1,229 @@ //! A tx for a PoS unbond that removes staked tokens from a self-bond or a //! delegation to be withdrawn in or after unbonding epoch. -use namada_tx_prelude::proof_of_stake::withdraw_tokens; use namada_tx_prelude::*; #[transaction] -fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); - let withdraw = - transaction::pos::Withdraw::try_from_slice(&signed.data.unwrap()[..]) - .unwrap(); - - match withdraw_tokens(withdraw.source.as_ref(), &withdraw.validator) { - Ok(slashed) => { - debug_log!("Withdrawal slashed for {}", slashed); +fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; + let data = signed.data.ok_or_err_msg("Missing data")?; + let withdraw = transaction::pos::Withdraw::try_from_slice(&data[..]) + .wrap_err("failed to decode Withdraw")?; + + let slashed = + ctx.withdraw_tokens(withdraw.source.as_ref(), &withdraw.validator)?; + if slashed != token::Amount::default() { + debug_log!("Withdrawal slashed for {}", slashed); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use namada::ledger::pos::PosParams; + use namada::proto::Tx; + use namada::types::storage::Epoch; + use namada_tests::log::test; + use namada_tests::native_vp::pos::init_pos; + use namada_tests::native_vp::TestNativeVpEnv; + use namada_tests::tx::*; + use namada_tx_prelude::address::testing::{ + arb_established_address, arb_non_internal_address, + }; + use namada_tx_prelude::address::InternalAddress; + use namada_tx_prelude::key::testing::arb_common_keypair; + use namada_tx_prelude::key::RefTo; + use namada_tx_prelude::proof_of_stake::parameters::testing::arb_pos_params; + use namada_vp_prelude::proof_of_stake::{ + staking_token_address, BondId, GenesisValidator, PosVP, + }; + use proptest::prelude::*; + + use super::*; + + proptest! { + /// In this test we setup the ledger and PoS system with an arbitrary + /// initial state with 1 genesis validator, a delegation bond if the + /// withdrawal is for a delegation, arbitrary PoS parameters and + /// a we generate an arbitrary withdrawal that we'd like to apply. + /// + /// After we apply the withdrawal, we're checking that all the storage + /// values in PoS system have been updated as expected and then we also + /// check that this transaction is accepted by the PoS validity + /// predicate. 
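// --- Illustrative sketch, not part of this change ---
// Two facts the withdrawal test below relies on, restated as tiny helpers;
// the names and signatures are hypothetical. First, tokens unbonded at
// epoch 0 only become withdrawable once the chain has advanced past the
// unbonding offset, which is why the test fast-forwards `unbonding_len`
// epochs before applying the tx. Second, with no slashes involved, a
// successful withdrawal releases exactly the unbonded amount from the PoS
// account, so its balance drops by `unbonded_amount`.
fn is_withdrawable(
    unbond_epoch: u64,
    current_epoch: u64,
    unbonding_len: u64,
) -> bool {
    current_epoch >= unbond_epoch + unbonding_len
}

fn expected_pos_balance_after_withdraw(
    initial_stake: u64,
    unbonded_amount: u64,
) -> u64 {
    // Mirrors `pos_balance_pre - pos_balance_post == unbonded_amount`
    initial_stake - unbonded_amount
}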
+ #[test] + fn test_tx_withdraw( + (initial_stake, unbonded_amount) in arb_initial_stake_and_unbonded_amount(), + withdraw in arb_withdraw(), + // A key to sign the transaction + key in arb_common_keypair(), + pos_params in arb_pos_params()) { + test_tx_withdraw_aux(initial_stake, unbonded_amount, withdraw, key, + pos_params).unwrap() } - Err(err) => { - debug_log!("Withdrawal failed with: {}", err); - panic!() + } + + fn test_tx_withdraw_aux( + initial_stake: token::Amount, + unbonded_amount: token::Amount, + withdraw: transaction::pos::Withdraw, + key: key::common::SecretKey, + pos_params: PosParams, + ) -> TxResult { + let is_delegation = matches!( + &withdraw.source, Some(source) if *source != withdraw.validator); + let staking_reward_address = address::testing::established_address_1(); + let consensus_key = key::testing::keypair_1().ref_to(); + let staking_reward_key = key::testing::keypair_2().ref_to(); + let eth_cold_key = key::testing::keypair_3().ref_to(); + let eth_hot_key = key::testing::keypair_4().ref_to(); + + let genesis_validators = [GenesisValidator { + address: withdraw.validator.clone(), + staking_reward_address, + tokens: if is_delegation { + // If we're withdrawing a delegation, we'll give the initial + // stake to the delegation instead of the + // validator + token::Amount::default() + } else { + initial_stake + }, + consensus_key, + staking_reward_key, + eth_cold_key, + eth_hot_key, + }]; + + init_pos(&genesis_validators[..], &pos_params, Epoch(0)); + + tx_host_env::with(|tx_env| { + if is_delegation { + let source = withdraw.source.as_ref().unwrap(); + tx_env.spawn_accounts([source]); + + // To allow to unbond delegation, there must be a delegation + // bond first. + // First, credit the bond's source with the initial stake, + // before we initialize the bond below + tx_env.credit_tokens( + source, + &staking_token_address(), + initial_stake, + ); + } + }); + + if is_delegation { + // Initialize the delegation - unlike genesis validator's self-bond, + // this happens at pipeline offset + ctx().bond_tokens( + withdraw.source.as_ref(), + &withdraw.validator, + initial_stake, + )?; } + + // Unbond the `unbonded_amount` at the starting epoch 0 + ctx().unbond_tokens( + withdraw.source.as_ref(), + &withdraw.validator, + unbonded_amount, + )?; + + tx_host_env::commit_tx_and_block(); + + // Fast forward to unbonding offset epoch so that it's possible to + // withdraw the unbonded tokens + tx_host_env::with(|env| { + for _ in 0..pos_params.unbonding_len { + env.storage.block.epoch = env.storage.block.epoch.next(); + } + }); + assert_eq!( + tx_host_env::with(|env| env.storage.block.epoch), + Epoch(pos_params.unbonding_len) + ); + + let tx_code = vec![]; + let tx_data = withdraw.try_to_vec().unwrap(); + let tx = Tx::new(tx_code, Some(tx_data)); + let signed_tx = tx.sign(&key); + let tx_data = signed_tx.data.unwrap(); + + // Read data before we apply tx: + let pos_balance_key = token::balance_key( + &staking_token_address(), + &Address::Internal(InternalAddress::PoS), + ); + let pos_balance_pre: token::Amount = ctx() + .read(&pos_balance_key)? 
+ .expect("PoS must have balance"); + assert_eq!(pos_balance_pre, initial_stake); + let unbond_src = withdraw + .source + .clone() + .unwrap_or_else(|| withdraw.validator.clone()); + let unbond_id = BondId { + validator: withdraw.validator, + source: unbond_src, + }; + let unbonds_pre = ctx().read_unbond(&unbond_id)?.unwrap(); + assert_eq!( + unbonds_pre.get(pos_params.unbonding_len).unwrap().sum(), + unbonded_amount + ); + + apply_tx(ctx(), tx_data)?; + + // Read the data after the tx is executed + let unbonds_post = ctx().read_unbond(&unbond_id)?; + assert!( + unbonds_post.is_none(), + "Because we're withdraw the full unbonded amount, there should be \ + no unbonds left" + ); + let pos_balance_post: token::Amount = ctx() + .read(&pos_balance_key)? + .expect("PoS must have balance"); + assert_eq!(pos_balance_pre - pos_balance_post, unbonded_amount); + + // Use the tx_env to run PoS VP + let tx_env = tx_host_env::take(); + let vp_env = TestNativeVpEnv::from_tx_env(tx_env, address::POS); + let result = vp_env.validate_tx(PosVP::new); + let result = + result.expect("Validation of valid changes must not fail!"); + assert!( + result, + "PoS Validity predicate must accept this transaction" + ); + Ok(()) + } + + fn arb_initial_stake_and_unbonded_amount() + -> impl Strategy { + // Generate initial stake + token::testing::arb_amount().prop_flat_map(|initial_stake| { + // Use the initial stake to limit the unbonded amount from the stake + let unbonded_amount = + token::testing::arb_amount_ceiled(initial_stake.into()); + // Use the generated initial stake too too + (Just(initial_stake), unbonded_amount) + }) + } + + fn arb_withdraw() -> impl Strategy { + ( + arb_established_address(), + prop::option::of(arb_non_internal_address()), + ) + .prop_map(|(validator, source)| { + transaction::pos::Withdraw { + validator: Address::Established(validator), + source, + } + }) } } diff --git a/wasm/wasm_source/src/vp_nft.rs b/wasm/wasm_source/src/vp_nft.rs index f1e6dd587b..77a9df8306 100644 --- a/wasm/wasm_source/src/vp_nft.rs +++ b/wasm/wasm_source/src/vp_nft.rs @@ -4,33 +4,36 @@ use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec, addr: Address, keys_changed: BTreeSet, verifiers: BTreeSet
, -) -> bool { +) -> VpResult { log_string(format!( "validate_tx called with token addr: {}, key_changed: {:#?}, \ verifiers: {:?}", addr, keys_changed, verifiers )); - if !is_tx_whitelisted() { - return false; + if !is_valid_tx(ctx, &tx_data)? { + return reject(); } - let vp_check = - keys_changed - .iter() - .all(|key| match key.is_validity_predicate() { - Some(_) => { - let vp: Vec = read_bytes_post(key.to_string()).unwrap(); - is_vp_whitelisted(&vp) + let vp_check = keys_changed.iter().all(|key| { + if key.is_validity_predicate().is_some() { + match ctx.read_bytes_post(key) { + Ok(Some(vp)) => { + matches!(is_vp_whitelisted(ctx, &vp), Ok(true)) } - None => true, - }); - - vp_check && nft::vp(tx_data, &addr, &keys_changed, &verifiers) + _ => false, + } + } else { + true + } + }); + + Ok(vp_check && nft::vp(ctx, tx_data, &addr, &keys_changed, &verifiers)?) } #[cfg(test)] @@ -38,8 +41,9 @@ mod tests { use namada::types::nft::{self, NftToken}; use namada::types::transaction::nft::{CreateNft, MintNft}; use namada_tests::log::test; - use namada_tests::tx::{tx_host_env, TestTxEnv}; + use namada_tests::tx::{self, tx_host_env, TestTxEnv}; use namada_tests::vp::*; + use namada_tx_prelude::{StorageWrite, TxEnv}; use super::*; @@ -59,21 +63,25 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.write_log.commit_tx(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |address| { // Apply transfer in a transaction - tx_host_env::insert_verifier(address) + tx::ctx().insert_verifier(address).unwrap() }); let vp_env = vp_host_env::take(); @@ -82,7 +90,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test that you can create an nft without tokens @@ -98,26 +109,34 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.write_log.commit_tx(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |address| { // Apply transfer in a transaction - tx_host_env::nft::mint_tokens(MintNft { - address: nft_address.clone(), - tokens: vec![], - creator: nft_creator.clone(), - }); - tx_host_env::insert_verifier(address) + tx_host_env::nft::mint_tokens( + tx::ctx(), + MintNft { + address: nft_address.clone(), + tokens: vec![], + creator: nft_creator.clone(), + }, + ) + .unwrap(); + tx::ctx().insert_verifier(address).unwrap() }); let vp_env = vp_host_env::take(); @@ -127,7 +146,10 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test that you can create an nft with tokens @@ -144,34 +166,42 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |_| { // Apply transfer in a transaction - tx_host_env::nft::mint_tokens(MintNft { - address: nft_address.clone(), - creator: nft_creator.clone(), - tokens: vec![NftToken { - id: 1, - values: vec![], - opt_values: vec![], - metadata: "".to_string(), - approvals: vec![], - current_owner: Some(nft_token_owner.clone()), - past_owners: vec![], - burnt: false, - }], - }); + tx_host_env::nft::mint_tokens( + tx::ctx(), + MintNft { + address: nft_address.clone(), + creator: nft_creator.clone(), + tokens: vec![NftToken { + id: 1, + values: vec![], + opt_values: vec![], + metadata: "".to_string(), + approvals: vec![], + current_owner: Some(nft_token_owner.clone()), + past_owners: vec![], + burnt: false, + }], + }, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -181,7 +211,10 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test that only owner can mint new tokens @@ -198,34 +231,42 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |_| { // Apply transfer in a transaction - tx_host_env::nft::mint_tokens(MintNft { - address: nft_address.clone(), - creator: nft_token_owner.clone(), - tokens: vec![NftToken { - id: 1, - values: vec![], - opt_values: vec![], - metadata: "".to_string(), - approvals: vec![], - current_owner: Some(nft_token_owner.clone()), - past_owners: vec![], - burnt: false, - }], - }); + tx_host_env::nft::mint_tokens( + tx::ctx(), + MintNft { + address: nft_address.clone(), + creator: nft_token_owner.clone(), + tokens: vec![NftToken { + id: 1, + values: vec![], + opt_values: vec![], + metadata: "".to_string(), + approvals: vec![], + current_owner: Some(nft_token_owner.clone()), + past_owners: vec![], + burnt: false, + }], + }, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -235,7 +276,10 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test that an approval can add another approval @@ -259,45 +303,54 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); tx_host_env::set(tx_env); - tx_host_env::nft::mint_tokens(MintNft { - address: nft_address.clone(), - creator: nft_creator.clone(), - tokens: vec![NftToken { - id: 1, - values: vec![], - opt_values: vec![], - metadata: "".to_string(), - approvals: vec![nft_token_approval.clone()], - current_owner: None, - past_owners: vec![], - burnt: false, - }], - }); + tx_host_env::nft::mint_tokens( + tx::ctx(), + MintNft { + address: nft_address.clone(), + creator: nft_creator.clone(), + tokens: vec![NftToken { + id: 1, + values: vec![], + opt_values: vec![], + metadata: "".to_string(), + approvals: vec![nft_token_approval.clone()], + current_owner: None, + past_owners: vec![], + burnt: false, + }], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |_| { - let approval_key = - nft::get_token_approval_key(&nft_address, "1").to_string(); - tx_host_env::write( - approval_key, - [&nft_token_approval_2, &nft_token_approval], - ); - tx_host_env::insert_verifier(&nft_token_approval); + let approval_key = nft::get_token_approval_key(&nft_address, "1"); + tx::ctx() + .write( + &approval_key, + [&nft_token_approval_2, &nft_token_approval], + ) + .unwrap(); + tx::ctx().insert_verifier(&nft_token_approval).unwrap(); }); let vp_env = vp_host_env::take(); @@ -307,7 +360,10 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test that an approval can add another approval @@ -331,45 +387,54 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_creator.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_creator.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); tx_host_env::set(tx_env); - tx_host_env::nft::mint_tokens(MintNft { - address: nft_address.clone(), - creator: nft_creator.clone(), - tokens: vec![NftToken { - id: 1, - values: vec![], - opt_values: vec![], - metadata: "".to_string(), - approvals: vec![nft_token_approval.clone()], - current_owner: None, - past_owners: vec![], - burnt: false, - }], - }); + tx_host_env::nft::mint_tokens( + tx::ctx(), + MintNft { + address: nft_address.clone(), + creator: nft_creator.clone(), + tokens: vec![NftToken { + id: 1, + values: vec![], + opt_values: vec![], + metadata: "".to_string(), + approvals: vec![nft_token_approval.clone()], + current_owner: None, + past_owners: vec![], + burnt: false, + }], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |_| { - let approval_key = - nft::get_token_approval_key(&nft_address, "1").to_string(); - tx_host_env::write( - approval_key, - [&nft_token_approval_2, &nft_token_approval], - ); - tx_host_env::insert_verifier(&nft_token_approval_2); + let approval_key = nft::get_token_approval_key(&nft_address, "1"); + tx::ctx() + .write( + &approval_key, + [&nft_token_approval_2, &nft_token_approval], + ) + .unwrap(); + tx::ctx().insert_verifier(&nft_token_approval_2).unwrap(); }); let vp_env = vp_host_env::take(); @@ -379,7 +444,10 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } /// Test nft address cannot be changed @@ -396,21 +464,25 @@ mod tests { std::fs::read(VP_ALWAYS_TRUE_WASM).expect("cannot load wasm"); tx_host_env::set(tx_env); - let nft_address = tx_host_env::nft::init_nft(CreateNft { - tag: "v1".to_string(), - creator: nft_owner.clone(), - vp_code, - keys: vec![], - opt_keys: vec![], - tokens: vec![], - }); + let nft_address = tx_host_env::nft::init_nft( + tx::ctx(), + CreateNft { + tag: "v1".to_string(), + creator: nft_owner.clone(), + vp_code, + keys: vec![], + opt_keys: vec![], + tokens: vec![], + }, + ) + .unwrap(); let mut tx_env = tx_host_env::take(); tx_env.commit_tx_and_block(); vp_host_env::init_from_tx(nft_address.clone(), tx_env, |_| { - let creator_key = nft::get_creator_key(&nft_address).to_string(); - tx_host_env::write(creator_key, &another_address); + let creator_key = nft::get_creator_key(&nft_address); + tx::ctx().write(&creator_key, &another_address).unwrap(); }); let vp_env = vp_host_env::take(); @@ -420,6 +492,9 @@ mod tests { let verifiers: BTreeSet
= vp_env.get_verifiers(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, nft_address, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, nft_address, keys_changed, verifiers) + .unwrap() + ); } } diff --git a/wasm/wasm_source/src/vp_testnet_faucet.rs b/wasm/wasm_source/src/vp_testnet_faucet.rs index a2f9a6267b..9582791565 100644 --- a/wasm/wasm_source/src/vp_testnet_faucet.rs +++ b/wasm/wasm_source/src/vp_testnet_faucet.rs @@ -13,11 +13,12 @@ pub const MAX_FREE_DEBIT: i128 = 1_000_000_000; // in micro units #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec, addr: Address, keys_changed: BTreeSet, verifiers: BTreeSet
, -) -> bool { +) -> VpResult { debug_log!( "vp_testnet_faucet called with user addr: {}, key_changed: {:?}, \ verifiers: {:?}", @@ -31,26 +32,31 @@ fn validate_tx( let valid_sig = Lazy::new(|| match &*signed_tx_data { Ok(signed_tx_data) => { - let pk = key::get(&addr); + let pk = key::get(ctx, &addr); match pk { - Some(pk) => verify_tx_signature(&pk, &signed_tx_data.sig), - None => false, + Ok(Some(pk)) => { + matches!( + ctx.verify_tx_signature(&pk, &signed_tx_data.sig), + Ok(true) + ) + } + _ => false, } } _ => false, }); - if !is_tx_whitelisted() { - return false; + if !is_valid_tx(ctx, &tx_data)? { + return reject(); } for key in keys_changed.iter() { let is_valid = if let Some(owner) = token::is_any_token_balance_key(key) { if owner == &addr { - let key = key.to_string(); - let pre: token::Amount = read_pre(&key).unwrap_or_default(); - let post: token::Amount = read_post(&key).unwrap_or_default(); + let pre: token::Amount = ctx.read_pre(key)?.unwrap_or_default(); + let post: token::Amount = + ctx.read_post(key)?.unwrap_or_default(); let change = post.change() - pre.change(); // Debit over `MAX_FREE_DEBIT` has to signed, credit doesn't change >= -MAX_FREE_DEBIT || change >= 0 || *valid_sig @@ -59,18 +65,17 @@ fn validate_tx( true } } else if let Some(owner) = key.is_validity_predicate() { - let key = key.to_string(); - let has_post: bool = has_key_post(&key); + let has_post: bool = ctx.has_key_post(key)?; if owner == &addr { if has_post { - let vp: Vec = read_bytes_post(&key).unwrap(); - return *valid_sig && is_vp_whitelisted(&vp); + let vp: Vec = ctx.read_bytes_post(key)?.unwrap(); + return Ok(*valid_sig && is_vp_whitelisted(ctx, &vp)?); } else { - return false; + return reject(); } } else { - let vp: Vec = read_bytes_post(&key).unwrap(); - return is_vp_whitelisted(&vp); + let vp: Vec = ctx.read_bytes_post(key)?.unwrap(); + return is_vp_whitelisted(ctx, &vp); } } else { // Allow any other key change if authorized by a signature @@ -78,10 +83,10 @@ fn validate_tx( }; if !is_valid { debug_log!("key {} modification failed vp", key); - return false; + return reject(); } } - true + accept() } #[cfg(test)] @@ -89,9 +94,10 @@ mod tests { use address::testing::arb_non_internal_address; // Use this as `#[test]` annotation to enable logging use namada_tests::log::test; - use namada_tests::tx::{tx_host_env, TestTxEnv}; + use namada_tests::tx::{self, tx_host_env, TestTxEnv}; use namada_tests::vp::vp_host_env::storage::Key; use namada_tests::vp::*; + use namada_tx_prelude::{StorageWrite, TxEnv}; use namada_vp_prelude::key::RefTo; use proptest::prelude::*; use storage::testing::arb_account_storage_key_no_vp; @@ -112,7 +118,9 @@ mod tests { // The VP env must be initialized before calling `validate_tx` vp_host_env::init(); - assert!(validate_tx(tx_data, addr, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, addr, keys_changed, verifiers).unwrap() + ); } /// Test that a credit transfer is accepted. @@ -137,8 +145,14 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction tx_host_env::token::transfer( - &source, address, &token, None, amount, - ); + tx_host_env::ctx(), + &source, + address, + &token, + None, + amount, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -147,7 +161,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a validity predicate update without a valid signature is @@ -167,7 +184,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -176,7 +195,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a validity predicate update with a valid signature is @@ -200,7 +222,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -212,7 +236,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } prop_compose! { @@ -253,7 +280,7 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction - tx_host_env::token::transfer(address, &target, &token, None, amount); + tx_host_env::token::transfer(tx::ctx(), address, &target, &token, None, amount).unwrap(); }); let vp_env = vp_host_env::take(); @@ -262,7 +289,7 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!(!validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers).unwrap()); } /// Test that a debit of less than or equal to [`MAX_FREE_DEBIT`] tokens without a valid signature is accepted. @@ -286,7 +313,7 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction - tx_host_env::token::transfer(address, &target, &token, None, amount); + tx_host_env::token::transfer(tx::ctx(), address, &target, &token, None, amount).unwrap(); }); let vp_env = vp_host_env::take(); @@ -295,7 +322,7 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!(validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers).unwrap()); } /// Test that a signed tx that performs arbitrary storage writes or @@ -323,9 +350,9 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |_address| { // Write or delete some data in the transaction if let Some(value) = &storage_value { - tx_host_env::write(storage_key.to_string(), value); + tx::ctx().write(&storage_key, value).unwrap(); } else { - tx_host_env::delete(storage_key.to_string()); + tx::ctx().delete(&storage_key).unwrap(); } }); @@ -338,7 +365,7 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!(validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers).unwrap()); } } } diff --git a/wasm/wasm_source/src/vp_token.rs b/wasm/wasm_source/src/vp_token.rs index 60513ce808..849e32efec 100644 --- a/wasm/wasm_source/src/vp_token.rs +++ b/wasm/wasm_source/src/vp_token.rs @@ -5,11 +5,12 @@ use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( - _tx_data: Vec, + ctx: &Ctx, + tx_data: Vec, addr: Address, keys_changed: BTreeSet, verifiers: BTreeSet
, -) -> bool { +) -> VpResult { debug_log!( "validate_tx called with token addr: {}, key_changed: {:?}, \ verifiers: {:?}", @@ -18,20 +19,18 @@ fn validate_tx( verifiers ); - if !is_tx_whitelisted() { - return false; + if !is_valid_tx(ctx, &tx_data)? { + return reject(); } - let vp_check = - keys_changed - .iter() - .all(|key| match key.is_validity_predicate() { - Some(_) => { - let vp: Vec = read_bytes_post(key.to_string()).unwrap(); - is_vp_whitelisted(&vp) - } - None => true, - }); + for key in keys_changed.iter() { + if key.is_validity_predicate().is_some() { + let vp: Vec = ctx.read_bytes_post(key)?.unwrap(); + if !is_vp_whitelisted(ctx, &vp)? { + return reject(); + } + } + } - vp_check && token::vp(&addr, &keys_changed, &verifiers) + token::vp(ctx, &addr, &keys_changed, &verifiers) } diff --git a/wasm/wasm_source/src/vp_user.rs b/wasm/wasm_source/src/vp_user.rs index e1815cc0b1..256dc6bb17 100644 --- a/wasm/wasm_source/src/vp_user.rs +++ b/wasm/wasm_source/src/vp_user.rs @@ -6,24 +6,15 @@ //! It allows to bond, unbond and withdraw tokens to and from PoS system with a //! valid signature. //! -//! It allows to fulfil intents that were signed by this account's key if they -//! haven't already been fulfilled (fulfilled intents are added to the owner's -//! invalid intent set). -//! //! Any other storage key changes are allowed only with a valid signature. -use namada_vp_prelude::intent::{ - Exchange, FungibleTokenIntent, IntentTransfers, -}; use namada_vp_prelude::storage::KeySeg; use namada_vp_prelude::*; use once_cell::unsync::Lazy; -use rust_decimal::prelude::*; enum KeyType<'a> { Token(&'a Address), PoS, - InvalidIntentSet(&'a Address), Nft(&'a Address), Vp(&'a Address), GovernanceVote(&'a Address), @@ -40,8 +31,6 @@ impl<'a> From<&'a storage::Key> for KeyType<'a> { Self::Token(address) } else if proof_of_stake::is_pos_key(key) { Self::PoS - } else if let Some(address) = intent::is_invalid_intent_key(key) { - Self::InvalidIntentSet(address) } else if let Some(address) = nft::is_nft_key(key) { Self::Nft(address) } else if gov_storage::is_vote_key(key) { @@ -61,11 +50,12 @@ impl<'a> From<&'a storage::Key> for KeyType<'a> { #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec, addr: Address, keys_changed: BTreeSet, verifiers: BTreeSet
, -) -> bool { +) -> VpResult { debug_log!( "vp_user called with user addr: {}, key_changed: {:?}, verifiers: {:?}", addr, @@ -78,22 +68,22 @@ fn validate_tx( let valid_sig = Lazy::new(|| match &*signed_tx_data { Ok(signed_tx_data) => { - let pk = key::get(&addr); + let pk = key::get(ctx, &addr); match pk { - Some(pk) => verify_tx_signature(&pk, &signed_tx_data.sig), - None => false, + Ok(Some(pk)) => { + matches!( + ctx.verify_tx_signature(&pk, &signed_tx_data.sig), + Ok(true) + ) + } + _ => false, } } _ => false, }); - let valid_intent = Lazy::new(|| match &*signed_tx_data { - Ok(signed_tx_data) => check_intent_transfers(&addr, signed_tx_data), - _ => false, - }); - - if !is_tx_whitelisted() { - return false; + if !is_valid_tx(ctx, &tx_data)? { + return reject(); } for key in keys_changed.iter() { @@ -101,20 +91,19 @@ fn validate_tx( let is_valid = match key_type { KeyType::Token(owner) => { if owner == &addr { - let key = key.to_string(); - let pre: token::Amount = read_pre(&key).unwrap_or_default(); + let pre: token::Amount = + ctx.read_pre(key)?.unwrap_or_default(); let post: token::Amount = - read_post(&key).unwrap_or_default(); + ctx.read_post(key)?.unwrap_or_default(); let change = post.change() - pre.change(); // debit has to signed, credit doesn't - let valid = change >= 0 || *valid_sig || *valid_intent; + let valid = change >= 0 || *valid_sig; debug_log!( - "token key: {}, change: {}, valid_sig: {}, \ - valid_intent: {}, valid modification: {}", + "token key: {}, change: {}, valid_sig: {}, valid \ + modification: {}", key, change, *valid_sig, - *valid_intent, valid ); valid @@ -152,27 +141,6 @@ fn validate_tx( ); valid } - KeyType::InvalidIntentSet(owner) => { - if owner == &addr { - let key = key.to_string(); - let pre: HashSet = - read_pre(&key).unwrap_or_default(); - let post: HashSet = - read_post(&key).unwrap_or_default(); - // A new invalid intent must have been added - pre.len() + 1 == post.len() - } else { - debug_log!( - "This address ({}) is not of owner ({}) of \ - InvalidIntentSet key: {}", - addr, - owner, - key - ); - // If this is not the owner, allow any change - true - } - } KeyType::Nft(owner) => { if owner == &addr { *valid_sig @@ -188,18 +156,17 @@ fn validate_tx( } } KeyType::Vp(owner) => { - let key = key.to_string(); - let has_post: bool = has_key_post(&key); + let has_post: bool = ctx.has_key_post(key)?; if owner == &addr { if has_post { - let vp: Vec = read_bytes_post(&key).unwrap(); - return *valid_sig && is_vp_whitelisted(&vp); + let vp: Vec = ctx.read_bytes_post(key)?.unwrap(); + *valid_sig && is_vp_whitelisted(ctx, &vp)? } else { - return false; + false } } else { - let vp: Vec = read_bytes_post(&key).unwrap(); - return is_vp_whitelisted(&vp); + let vp: Vec = ctx.read_bytes_post(key)?.unwrap(); + is_vp_whitelisted(ctx, &vp)? 
} } KeyType::Unknown => { @@ -215,153 +182,11 @@ fn validate_tx( }; if !is_valid { debug_log!("key {} modification failed vp", key); - return false; + return reject(); } } - true -} - -fn check_intent_transfers( - addr: &Address, - signed_tx_data: &SignedTxData, -) -> bool { - if let Some((raw_intent_transfers, exchange, intent)) = - try_decode_intent(addr, signed_tx_data) - { - log_string("check intent"); - return check_intent(addr, exchange, intent, raw_intent_transfers); - } - false -} - -fn try_decode_intent( - addr: &Address, - signed_tx_data: &SignedTxData, -) -> Option<( - Vec, - namada_vp_prelude::Signed, - namada_vp_prelude::Signed, -)> { - let raw_intent_transfers = signed_tx_data.data.as_ref().cloned()?; - let mut tx_data = - IntentTransfers::try_from_slice(&raw_intent_transfers[..]).ok()?; - debug_log!( - "tx_data.matches.exchanges: {:?}, {}", - tx_data.matches.exchanges, - &addr - ); - if let (Some(exchange), Some(intent)) = ( - tx_data.matches.exchanges.remove(addr), - tx_data.matches.intents.remove(addr), - ) { - return Some((raw_intent_transfers, exchange, intent)); - } else { - log_string("no intent with a matching address"); - } - None -} - -fn check_intent( - addr: &Address, - exchange: namada_vp_prelude::Signed, - intent: namada_vp_prelude::Signed, - raw_intent_transfers: Vec, -) -> bool { - // verify signature - let pk = key::get(addr); - if let Some(pk) = pk { - if intent.verify(&pk).is_err() { - log_string("invalid sig"); - return false; - } - } else { - return false; - } - - // verify the intent have not been already used - if !intent::vp_exchange(&exchange) { - return false; - } - - // verify the intent is fulfilled - let Exchange { - addr, - token_sell, - rate_min, - token_buy, - min_buy, - max_sell, - vp, - } = &exchange.data; - - debug_log!("vp is: {}", vp.is_some()); - - if let Some(code) = vp { - let eval_result = eval(code.to_vec(), raw_intent_transfers); - debug_log!("eval result: {}", eval_result); - if !eval_result { - return false; - } - } - - debug_log!( - "exchange description: {}, {}, {}, {}, {}", - token_sell, - token_buy, - max_sell.change(), - min_buy.change(), - rate_min.0 - ); - - let token_sell_key = token::balance_key(token_sell, addr).to_string(); - let mut sell_difference: token::Amount = - read_pre(&token_sell_key).unwrap_or_default(); - let sell_post: token::Amount = - read_post(token_sell_key).unwrap_or_default(); - - sell_difference.spend(&sell_post); - - let token_buy_key = token::balance_key(token_buy, addr).to_string(); - let buy_pre: token::Amount = read_pre(&token_buy_key).unwrap_or_default(); - let mut buy_difference: token::Amount = - read_post(token_buy_key).unwrap_or_default(); - - buy_difference.spend(&buy_pre); - - let sell_diff: Decimal = sell_difference.change().into(); // -> how many token I sold - let buy_diff: Decimal = buy_difference.change().into(); // -> how many token I got - - debug_log!( - "buy_diff > 0: {}, rate check: {}, max_sell > sell_diff: {}, buy_diff \ - > min_buy: {}", - buy_difference.change() > 0, - buy_diff / sell_diff >= rate_min.0, - max_sell.change() >= sell_difference.change(), - buy_diff >= min_buy.change().into() - ); - - if !(buy_difference.change() > 0 - && (buy_diff / sell_diff >= rate_min.0) - && max_sell.change() >= sell_difference.change() - && buy_diff >= min_buy.change().into()) - { - debug_log!( - "invalid exchange, {} / {}, sell diff: {}, buy diff: {}, \ - max_sell: {}, rate_min: {}, min_buy: {}, buy_diff / sell_diff: {}", - token_sell, - token_buy, - sell_difference.change(), - 
buy_difference.change(), - max_sell.change(), - rate_min.0, - min_buy.change(), - buy_diff / sell_diff - ); - false - } else { - true - } + accept() } #[cfg(test)] @@ -369,9 +194,10 @@ mod tests { use address::testing::arb_non_internal_address; // Use this as `#[test]` annotation to enable logging use namada_tests::log::test; - use namada_tests::tx::{tx_host_env, TestTxEnv}; + use namada_tests::tx::{self, tx_host_env, TestTxEnv}; use namada_tests::vp::vp_host_env::storage::Key; use namada_tests::vp::*; + use namada_tx_prelude::{StorageWrite, TxEnv}; use namada_vp_prelude::key::RefTo; use proptest::prelude::*; use storage::testing::arb_account_storage_key_no_vp; @@ -392,7 +218,9 @@ mod tests { // The VP env must be initialized before calling `validate_tx` vp_host_env::init(); - assert!(validate_tx(tx_data, addr, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, addr, keys_changed, verifiers).unwrap() + ); } /// Test that a credit transfer is accepted. @@ -417,8 +245,14 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction tx_host_env::token::transfer( - &source, address, &token, None, amount, - ); + tx::ctx(), + &source, + address, + &token, + None, + amount, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -427,7 +261,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a debit transfer without a valid signature is rejected. @@ -452,8 +289,14 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction tx_host_env::token::transfer( - address, &target, &token, None, amount, - ); + tx::ctx(), + address, + &target, + &token, + None, + amount, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -462,7 +305,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a debit transfer with a valid signature is accepted. @@ -491,8 +337,14 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Apply transfer in a transaction tx_host_env::token::transfer( - address, &target, &token, None, amount, - ); + tx::ctx(), + address, + &target, + &token, + None, + amount, + ) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -504,7 +356,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a transfer on with accounts other than self is accepted. @@ -528,11 +383,17 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { - tx_host_env::insert_verifier(address); + tx::ctx().insert_verifier(address).unwrap(); // Apply transfer in a transaction tx_host_env::token::transfer( - &source, &target, &token, None, amount, - ); + tx::ctx(), + &source, + &target, + &token, + None, + amount, + ) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -541,7 +402,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } prop_compose! { @@ -581,9 +445,9 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |_address| { // Write or delete some data in the transaction if let Some(value) = &storage_value { - tx_host_env::write(storage_key.to_string(), value); + tx::ctx().write(&storage_key, value).unwrap(); } else { - tx_host_env::delete(storage_key.to_string()); + tx::ctx().delete(&storage_key).unwrap(); } }); @@ -593,7 +457,7 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!(!validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers).unwrap()); } } @@ -623,9 +487,9 @@ mod tests { vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |_address| { // Write or delete some data in the transaction if let Some(value) = &storage_value { - tx_host_env::write(storage_key.to_string(), value); + tx::ctx().write(&storage_key, value).unwrap(); } else { - tx_host_env::delete(storage_key.to_string()); + tx::ctx().delete(&storage_key).unwrap(); } }); @@ -638,7 +502,7 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!(validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers).unwrap()); } } @@ -659,7 +523,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let vp_env = vp_host_env::take(); @@ -668,7 +534,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a validity predicate update with a valid signature is @@ -693,7 +562,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -705,7 +576,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a validity predicate update is rejected if not whitelisted @@ -729,7 +603,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -741,7 +617,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a validity predicate update is accepted if whitelisted @@ -767,7 +646,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -779,7 +660,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } /// Test that a tx is rejected if not whitelisted @@ -809,7 +693,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -821,7 +707,10 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(!validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + !validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } #[test] @@ -846,7 +735,9 @@ mod tests { // Initialize VP environment from a transaction vp_host_env::init_from_tx(vp_owner.clone(), tx_env, |address| { // Update VP in a transaction - tx_host_env::update_validity_predicate(address, &vp_code); + tx::ctx() + .update_validity_predicate(address, &vp_code) + .unwrap(); }); let mut vp_env = vp_host_env::take(); @@ -858,6 +749,9 @@ mod tests { vp_env.all_touched_storage_keys(); let verifiers: BTreeSet
= BTreeSet::default(); vp_host_env::set(vp_env); - assert!(validate_tx(tx_data, vp_owner, keys_changed, verifiers)); + assert!( + validate_tx(&CTX, tx_data, vp_owner, keys_changed, verifiers) + .unwrap() + ); } } diff --git a/wasm_for_tests/tx_memory_limit.wasm b/wasm_for_tests/tx_memory_limit.wasm index 88c8ef0ada..ba3d607877 100755 Binary files a/wasm_for_tests/tx_memory_limit.wasm and b/wasm_for_tests/tx_memory_limit.wasm differ diff --git a/wasm_for_tests/tx_mint_tokens.wasm b/wasm_for_tests/tx_mint_tokens.wasm index b6f8f074bf..b8ddaa495e 100755 Binary files a/wasm_for_tests/tx_mint_tokens.wasm and b/wasm_for_tests/tx_mint_tokens.wasm differ diff --git a/wasm_for_tests/tx_no_op.wasm b/wasm_for_tests/tx_no_op.wasm index bdab4054d9..d2d1de127e 100755 Binary files a/wasm_for_tests/tx_no_op.wasm and b/wasm_for_tests/tx_no_op.wasm differ diff --git a/wasm_for_tests/tx_proposal_code.wasm b/wasm_for_tests/tx_proposal_code.wasm index 3c753377ed..038ec12d9a 100755 Binary files a/wasm_for_tests/tx_proposal_code.wasm and b/wasm_for_tests/tx_proposal_code.wasm differ diff --git a/wasm_for_tests/tx_read_storage_key.wasm b/wasm_for_tests/tx_read_storage_key.wasm index b4266e6940..1c1f04b74b 100755 Binary files a/wasm_for_tests/tx_read_storage_key.wasm and b/wasm_for_tests/tx_read_storage_key.wasm differ diff --git a/wasm_for_tests/tx_write_storage_key.wasm b/wasm_for_tests/tx_write_storage_key.wasm index 89b669f6af..da08e72616 100755 Binary files a/wasm_for_tests/tx_write_storage_key.wasm and b/wasm_for_tests/tx_write_storage_key.wasm differ diff --git a/wasm_for_tests/vp_always_false.wasm b/wasm_for_tests/vp_always_false.wasm index 57b061685c..cdb840e4c2 100755 Binary files a/wasm_for_tests/vp_always_false.wasm and b/wasm_for_tests/vp_always_false.wasm differ diff --git a/wasm_for_tests/vp_always_true.wasm b/wasm_for_tests/vp_always_true.wasm index 8990ed651f..59eb75aa64 100755 Binary files a/wasm_for_tests/vp_always_true.wasm and b/wasm_for_tests/vp_always_true.wasm differ diff --git a/wasm_for_tests/vp_eval.wasm b/wasm_for_tests/vp_eval.wasm index 15b9ad7d67..846197f370 100755 Binary files a/wasm_for_tests/vp_eval.wasm and b/wasm_for_tests/vp_eval.wasm differ diff --git a/wasm_for_tests/vp_memory_limit.wasm b/wasm_for_tests/vp_memory_limit.wasm index 9d445b88fd..08e79ffa73 100755 Binary files a/wasm_for_tests/vp_memory_limit.wasm and b/wasm_for_tests/vp_memory_limit.wasm differ diff --git a/wasm_for_tests/vp_read_storage_key.wasm b/wasm_for_tests/vp_read_storage_key.wasm index fa5e6bcb3a..72b1d7efe6 100755 Binary files a/wasm_for_tests/vp_read_storage_key.wasm and b/wasm_for_tests/vp_read_storage_key.wasm differ diff --git a/wasm_for_tests/wasm_source/Cargo.lock b/wasm_for_tests/wasm_source/Cargo.lock index 56e8a94bfb..60d8733270 100644 --- a/wasm_for_tests/wasm_source/Cargo.lock +++ b/wasm_for_tests/wasm_source/Cargo.lock @@ -8,7 +8,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" dependencies = [ - "gimli 0.26.1", + "gimli 0.26.2", ] [[package]] @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "0.7.18" +version = "0.7.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" dependencies = [ "memchr", ] @@ -48,9 +48,9 @@ dependencies = [ [[package]] name = "anyhow" -version = 
"1.0.56" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "ark-bls12-381" @@ -163,9 +163,9 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "async-trait" -version = "0.1.53" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600" +checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" dependencies = [ "proc-macro2", "quote", @@ -180,16 +180,16 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e121dee8023ce33ab248d9ce1493df03c3b38a659b240096fcbd7048ff9c31f" +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" dependencies = [ "addr2line", "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object 0.27.1", + "object 0.29.0", "rustc-demangle", ] @@ -207,9 +207,9 @@ checksum = "cf9ff0bbfd639f15c74af777d81383cf53efb7c93613f6cab67c6c11e05bbf8b" [[package]] name = "bit-set" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] @@ -249,7 +249,7 @@ dependencies = [ "cc", "cfg-if 1.0.0", "constant_time_eq", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -264,9 +264,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" dependencies = [ "generic-array", ] @@ -320,21 +320,21 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.9.1" +version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byte-slice-cast" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c5fdd0166095e1d463fc6cc01aa8ce547ad77a4e84d42eb6762b084e28067e" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytecheck" -version = "0.6.7" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "314889ea31cda264cb7c3d6e6e5c9415a987ecb0e72c17c00d36fbb881d34abe" +checksum = "d11cac2c12b5adc6570dad2ee1b87eff4955dac476fe12d81e5fdd352e52406f" dependencies = [ "bytecheck_derive", "ptr_meta", @@ -342,9 +342,9 @@ dependencies = [ [[package]] name = "bytecheck_derive" -version = "0.6.7" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a2b3b92c135dae665a6f760205b89187638e83bed17ef3e44e83c712cf30600" +checksum = "13e576ebe98e605500b3c8041bb888e966653577172df6dd97398714eb30b9bf" dependencies = [ "proc-macro2", "quote", @@ -359,9 +359,9 @@ checksum = 
"14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "cc" @@ -398,6 +398,16 @@ name = "clru" version = "0.5.0" source = "git+https://github.com/marmeladema/clru-rs.git?rev=71ca566#71ca566915f21f3c308091ca7756a91b0f8b5afc" +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = "concat-idents" version = "1.1.3" @@ -422,9 +432,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] @@ -500,9 +510,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -510,9 +520,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", @@ -521,26 +531,24 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "lazy_static", "memoffset", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if 1.0.0", - "lazy_static", ] [[package]] @@ -551,9 +559,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-common" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", @@ -580,16 +588,60 @@ checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle-ng", "zeroize", ] +[[package]] +name = "cxx" +version 
= "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "darling" -version = "0.13.1" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" +checksum = "4529658bdda7fd6769b8614be250cdcfc3aeb0ee72fe66f9e41e5e5eb73eac02" dependencies = [ "darling_core", "darling_macro", @@ -597,23 +649,22 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.13.1" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" +checksum = "649c91bc01e8b1eac09fb91e8dbc7d517684ca6be8ebc75bb9cafc894f9fdb6f" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "strsim", "syn", ] [[package]] name = "darling_macro" -version = "0.13.1" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" +checksum = "ddfc69c5bfcbd2fc09a0f38451d2daf0e372e367986a83906d1b0dbc88134fb5" dependencies = [ "darling_core", "quote", @@ -659,11 +710,11 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" dependencies = [ - "block-buffer 0.10.2", + "block-buffer 0.10.3", "crypto-common", "subtle", ] @@ -696,22 +747,22 @@ dependencies = [ [[package]] name = "ed25519" -version = "1.4.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d5c4b5e5959dc2c2b89918d8e2cc40fcdd623cef026ed09d2f0ee05199dc8e4" +checksum = "1e9c280362032ea4203659fc489832d0204ef09f247a0506f170dafcac08c369" dependencies = [ "signature", ] [[package]] name = "ed25519-consensus" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542217a53411d471743362251a5a1770a667cb0cc0384c9be2c0952bd70a7275" +checksum = "758e2a0cd8a6cdf483e1d369e7d081647e00b88d8953e34d8f2cbba05ae28368" dependencies = [ "curve25519-dalek-ng", "hex", - "rand_core 0.6.3", + "rand_core 0.6.4", "serde", "sha2 0.9.9", "thiserror", @@ -732,9 +783,9 @@ dependencies = [ [[package]] name = "either" -version = "1.6.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "enum-iterator" @@ -758,18 +809,18 @@ dependencies = [ [[package]] name = "enumset" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6216d2c19a6fb5f29d1ada1dc7bc4367a8cbf0fa4af5cf12e07b5bbdde6b5b2c" +checksum = "19be8061a06ab6f3a6cf21106c873578bf01bd42ad15e0311a9c76161cb1c753" dependencies = [ "enumset_derive", ] [[package]] name = "enumset_derive" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6451128aa6655d880755345d085494cf7561a6bee7c8dc821e5d77e6d267ecd4" +checksum = "03e7b551eba279bf0fa88b83a46330168c1560a52a94f5126f892f0b364ab3e0" dependencies = [ "darling", "proc-macro2", @@ -789,7 +840,7 @@ dependencies = [ "regex", "serde", "serde_json", - "sha3 0.10.2", + "sha3 0.10.6", "thiserror", "uint", ] @@ -839,9 +890,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] @@ -849,7 +900,7 @@ dependencies = [ [[package]] name = "ferveo-common" version = "0.1.0" -source = "git+https://github.com/anoma/ferveo#8363c33d1cf79f93ce9fa89d4b5fe998a5a78c26" +source = "git+https://github.com/anoma/ferveo#1022ab2c7ccc689abcc05e5a08df6fb0c2a3fc65" dependencies = [ "anyhow", "ark-ec", @@ -873,9 +924,9 @@ dependencies = [ [[package]] name = "fixedbitset" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flex-error" @@ -901,9 +952,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" dependencies = [ "futures-channel", "futures-core", @@ -915,9 +966,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" dependencies = [ "futures-core", "futures-sink", @@ -925,33 +976,33 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = 
"bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" [[package]] name = "futures-task" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" +checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" [[package]] name = "futures-util" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" dependencies = [ "futures-core", "futures-sink", @@ -962,9 +1013,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check", @@ -994,9 +1045,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" [[package]] name = "gumdrop" @@ -1029,9 +1080,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.12.0" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c21d40587b92fa6a6c6e3c1bdbf87d75511db5672f9c93175574b3a00df1758" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ "ahash", ] @@ -1062,17 +1113,28 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "iana-time-zone" -version = "0.1.50" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" +checksum = "f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" dependencies = [ "android_system_properties", "core-foundation-sys", + "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "winapi", ] +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + [[package]] name = "ibc" version = "0.14.0" @@ -1090,7 +1152,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "sha2 0.10.2", + "sha2 0.10.6", "subtle-encoding", "tendermint", "tendermint-light-client-verifier", @@ -1181,12 +1243,12 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "1.8.1" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown 
0.11.2", + "hashbrown 0.12.3", "serde", ] @@ -1201,33 +1263,33 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" [[package]] name = "js-sys" -version = "0.3.56" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" dependencies = [ "wasm-bindgen", ] [[package]] name = "keccak" -version = "0.1.0" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7" +checksum = "f9b7d56ba4a8344d6be9729995e6b06f928af29998cdf79fe390cbf6b1fee838" [[package]] name = "lazy_static" @@ -1243,9 +1305,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.134" +version = "0.2.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" +checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" [[package]] name = "libloading" @@ -1299,11 +1361,20 @@ dependencies = [ "libsecp256k1-core", ] +[[package]] +name = "link-cplusplus" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" +dependencies = [ + "cc", +] + [[package]] name = "log" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if 1.0.0", ] @@ -1349,15 +1420,15 @@ dependencies = [ [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.3" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f" +checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498" dependencies = [ "libc", ] @@ -1379,12 +1450,11 @@ checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" [[package]] name = "miniz_oxide" -version = "0.4.4" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", - "autocfg", ] [[package]] @@ -1401,7 +1471,7 @@ checksum = 
"e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "namada" -version = "0.7.1" +version = "0.8.1" dependencies = [ "ark-bls12-381", "ark-serialize", @@ -1430,7 +1500,7 @@ dependencies = [ "prost-types", "pwasm-utils", "rand", - "rand_core 0.6.3", + "rand_core 0.6.4", "rust_decimal", "serde", "serde_json", @@ -1455,7 +1525,7 @@ dependencies = [ [[package]] name = "namada_macros" -version = "0.7.1" +version = "0.8.1" dependencies = [ "quote", "syn", @@ -1463,22 +1533,24 @@ dependencies = [ [[package]] name = "namada_proof_of_stake" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", + "derivative", "proptest", "thiserror", ] [[package]] name = "namada_tests" -version = "0.7.1" +version = "0.8.1" dependencies = [ "chrono", "concat-idents", "derivative", "namada", - "namada_vm_env", + "namada_tx_prelude", + "namada_vp_prelude", "prost", "serde_json", "sha2 0.9.9", @@ -1490,39 +1562,44 @@ dependencies = [ [[package]] name = "namada_tx_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.2", + "sha2 0.10.6", + "thiserror", ] [[package]] name = "namada_vm_env" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", - "hex", "namada", - "namada_macros", ] [[package]] name = "namada_vp_prelude" -version = "0.7.1" +version = "0.8.1" dependencies = [ + "borsh", + "namada", + "namada_macros", "namada_vm_env", - "sha2 0.10.2", + "sha2 0.10.6", + "thiserror", ] [[package]] name = "namada_wasm_for_tests" -version = "0.7.1" +version = "0.8.1" dependencies = [ "borsh", "getrandom", "namada_tests", "namada_tx_prelude", - "namada_vm_env", "namada_vp_prelude", "wee_alloc", ] @@ -1551,9 +1628,9 @@ dependencies = [ [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ "autocfg", "num-traits", @@ -1573,9 +1650,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] @@ -1592,39 +1669,39 @@ dependencies = [ [[package]] name = "num_threads" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ "libc", ] [[package]] name = "object" -version = "0.27.1" +version = "0.28.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" dependencies = [ + "crc32fast", + "hashbrown 0.11.2", + "indexmap", "memchr", ] [[package]] name = "object" -version = "0.28.3" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ - "crc32fast", - "hashbrown 0.11.2", - "indexmap", 
"memchr", ] [[package]] name = "once_cell" -version = "1.13.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "opaque-debug" @@ -1634,9 +1711,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "parity-scale-codec" -version = "3.1.5" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9182e4a71cae089267ab03e67c99368db7cd877baf50f931e5d6d4b71e195ac0" +checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" dependencies = [ "arrayvec", "bitvec", @@ -1666,24 +1743,25 @@ checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" [[package]] name = "paste" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "pest" -version = "2.1.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +checksum = "dbc7bc69c062e492337d74d59b120c274fd3d261b6bf6d3207d499b4b379c41a" dependencies = [ + "thiserror", "ucd-trie", ] [[package]] name = "petgraph" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f" +checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" dependencies = [ "fixedbitset", "indexmap", @@ -1691,9 +1769,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -1766,11 +1844,11 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.36" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ - "unicode-xid", + "unicode-ident", ] [[package]] @@ -1890,9 +1968,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quote" -version = "1.0.17" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "632d02bff7f874a36f33ea8bb416cd484b90cc66c1194b1a1110d067a7013f58" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -1911,7 +1989,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -1921,7 +1999,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -1932,9 +2010,9 @@ checksum = 
"90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] @@ -1945,14 +2023,14 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] name = "rayon" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" dependencies = [ "autocfg", "crossbeam-deque", @@ -1962,22 +2040,21 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "lazy_static", "num_cpus", ] [[package]] name = "redox_syscall" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] @@ -1995,9 +2072,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -2015,9 +2092,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "region" @@ -2062,12 +2139,12 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.37" +version = "0.7.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f08c8062c1fe1253064043b8fc07bfea1b9702b71b4a86c11ea3588183b12e1" +checksum = "cec2b3485b07d96ddfd3134767b8a447b45ea4eb91448d0a35180ec0ffd5ed15" dependencies = [ "bytecheck", - "hashbrown 0.12.0", + "hashbrown 0.12.3", "ptr_meta", "rend", "rkyv_derive", @@ -2076,9 +2153,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.37" +version = "0.7.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e289706df51226e84814bf6ba1a9e1013112ae29bc7a9878f73fce360520c403" +checksum = "6eaedadc88b53e36dd32d940ed21ae4d850d5916f2581526921f553a72ac34c4" dependencies = [ "proc-macro2", "quote", @@ -2087,9 +2164,9 @@ dependencies = [ [[package]] name = "rlp" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" +checksum = 
"bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ "bytes", "rustc-hex", @@ -2097,9 +2174,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.23.1" +version = "1.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22dc69eadbf0ee2110b8d20418c0c6edbaefec2811c4963dc17b6344e11fe0f8" +checksum = "ee9164faf726e4f3ece4978b25ca877ddc6802fa77f38cdccb32c7f805ecd70c" dependencies = [ "arrayvec", "num-traits", @@ -2135,9 +2212,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "rusty-fork" @@ -2153,9 +2230,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safe-proc-macro2" @@ -2210,6 +2287,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scratch" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" + [[package]] name = "seahash" version = "4.1.0" @@ -2236,27 +2319,27 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.136" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] [[package]] name = "serde_bytes" -version = "0.11.5" +version = "0.11.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +checksum = "cfc50e8183eeeb6178dcb167ae34a8051d63535023ae38b5d8d12beae193d37b" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.136" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -2265,9 +2348,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.79" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" dependencies = [ "itoa", "ryu", @@ -2276,9 +2359,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5" +checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" dependencies = [ "proc-macro2", "quote", @@ -2300,13 +2383,13 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.2" +version = "0.10.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -2323,11 +2406,11 @@ dependencies = [ [[package]] name = "sha3" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a31480366ec990f395a61b7c08122d99bd40544fdb5abcfc1b06bb29994312c" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "keccak", ] @@ -2342,9 +2425,9 @@ dependencies = [ [[package]] name = "signature" -version = "1.4.0" +version = "1.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02658e48d89f2bec991f9a78e69cfa4c316f8d6a6c4ec12fae1aeb263d486788" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "simple-error" @@ -2354,9 +2437,9 @@ checksum = "cc47a29ce97772ca5c927f75bac34866b16d64e07f330c3248e2d7226623901b" [[package]] name = "smallvec" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "sp-std" @@ -2367,7 +2450,7 @@ checksum = "35391ea974fa5ee869cb094d5b437688fbf3d8127d64d1b9fed5822a1ed39b12" [[package]] name = "sparse-merkle-tree" version = "0.3.1-pre" -source = "git+https://github.com/heliaxdev/sparse-merkle-tree?branch=bat/arse-merkle-tree#04ad1eeb28901b57a7599bbe433b3822965dabe8" +source = "git+https://github.com/heliaxdev/sparse-merkle-tree?rev=04ad1eeb28901b57a7599bbe433b3822965dabe8#04ad1eeb28901b57a7599bbe433b3822965dabe8" dependencies = [ "borsh", "cfg-if 1.0.0", @@ -2387,12 +2470,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "subtle" version = "2.4.1" @@ -2416,13 +2493,13 @@ checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" [[package]] name = "syn" -version = "1.0.90" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704df27628939572cd88d33f171cd6f896f4eaca85252c6e0a72d8d8287ee86f" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", - "unicode-xid", + "unicode-ident", ] [[package]] @@ -2445,9 +2522,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fa7e55043acb85fca6b3c01485a2eeb6b69c5d21002e273c79e465f43b7ac1" +checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1" [[package]] name = "tempfile" @@ -2535,11 +2612,20 @@ dependencies = [ "time", ] +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + [[package]] name = "test-log" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4235dbf7ea878b3ef12dea20a59c134b405a66aafc4fc2c7b9935916e289e735" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" dependencies = [ "proc-macro2", "quote", @@ -2548,18 +2634,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", @@ -2577,9 +2663,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "d634a985c4d4238ec39cacaed2e7ae552fbd3c476b552c1deac3021b7d7eaf0c" dependencies = [ "libc", "num_threads", @@ -2603,9 +2689,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] @@ -2624,9 +2710,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.34" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if 1.0.0", "log", @@ -2637,9 +2723,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.20" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", @@ -2648,22 +2734,21 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.23" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa31669fa42c09c34d94d8165dd2012e8ff3c66aca50f3bb226b68f216f2706c" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ - "lazy_static", - "valuable", + "once_cell", ] [[package]] name = "tracing-subscriber" -version = "0.3.9" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" dependencies = [ - "lazy_static", "matchers", + "once_cell", "regex", "sharded-slab", "thread_local", @@ -2679,15 +2764,15 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name 
= "ucd-trie" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" [[package]] name = "uint" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f03af7ccf01dd611cc450a0d10dbc9b745770d096473e2faf0ca6e2d66d1e0" +checksum = "a45526d29728d135c2900b0d30573fe3ee79fceb12ef534c7bb30e810a91b601" dependencies = [ "byteorder", "crunchy", @@ -2696,28 +2781,28 @@ dependencies = [ ] [[package]] -name = "unicode-segmentation" -version = "1.9.0" +name = "unicode-ident" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" [[package]] -name = "unicode-width" -version = "0.1.9" +name = "unicode-segmentation" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] -name = "unicode-xid" -version = "0.2.2" +name = "unicode-width" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] -name = "valuable" -version = "0.1.0" +name = "unicode-xid" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "version_check" @@ -2742,9 +2827,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.79" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -2752,13 +2837,13 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.79" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" dependencies = [ "bumpalo", - "lazy_static", "log", + "once_cell", "proc-macro2", "quote", "syn", @@ -2767,9 +2852,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.79" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2777,9 +2862,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.79" +version = "0.2.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +checksum = 
"07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", @@ -2790,9 +2875,18 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.79" +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" + +[[package]] +name = "wasm-encoder" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" +checksum = "c64ac98d5d61192cc45c701b7e4bd0b9aff91e2edfc7a088406cfe2288581e2c" +dependencies = [ + "leb128", +] [[package]] name = "wasmer" @@ -2937,7 +3031,7 @@ dependencies = [ "leb128", "libloading", "loupe", - "object 0.28.3", + "object 0.28.4", "rkyv", "serde", "tempfile", @@ -2976,7 +3070,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d0c4005592998bd840f2289102ef9c67b6138338ed78e1fc0809586aa229040" dependencies = [ - "object 0.28.3", + "object 0.28.4", "thiserror", "wasmer-compiler", "wasmer-types", @@ -3032,20 +3126,21 @@ checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" [[package]] name = "wast" -version = "39.0.0" +version = "47.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9bbbd53432b267421186feee3e52436531fa69a7cfee9403f5204352df3dd05" +checksum = "02b98502f3978adea49551e801a6687678e6015317d7d9470a67fe813393f2a8" dependencies = [ "leb128", "memchr", "unicode-width", + "wasm-encoder", ] [[package]] name = "wat" -version = "1.0.41" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab98ed25494f97c69f28758617f27c3e92e5336040b5c3a14634f2dd3fe61830" +checksum = "7aab4e20c60429fbba9670a6cae0fff9520046ba0aa3e6d0b1cd2653bea14898" dependencies = [ "wast", ] @@ -3064,13 +3159,13 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static", "libc", + "once_cell", ] [[package]] @@ -3089,6 +3184,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" diff --git a/wasm_for_tests/wasm_source/Cargo.toml b/wasm_for_tests/wasm_source/Cargo.toml index 0436d8b44b..90cb8ebb58 100644 --- a/wasm_for_tests/wasm_source/Cargo.toml +++ b/wasm_for_tests/wasm_source/Cargo.toml @@ -4,7 +4,7 @@ edition = "2021" license = "GPL-3.0" name = "namada_wasm_for_tests" resolver = "2" -version = "0.7.1" +version = "0.8.1" [lib] crate-type = ["cdylib"] @@ -26,7 +26,6 @@ tx_proposal_code = [] [dependencies] namada_tx_prelude = {path = "../../tx_prelude"} -namada_vm_env = {path = "../../vm_env"} namada_vp_prelude = {path = "../../vp_prelude"} borsh = "0.9.1" wee_alloc = "0.4.5" @@ -38,7 +37,6 @@ borsh = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4 borsh-derive = {git = 
"https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} borsh-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} borsh-schema-derive-internal = {git = "https://github.com/heliaxdev/borsh-rs.git", rev = "cd5223e5103c4f139e0c54cf8259b7ec5ec4073a"} - # patched to a commit on the `eth-bridge-integration` branch of our fork tendermint = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} tendermint-proto = {git = "https://github.com/heliaxdev/tendermint-rs.git", rev = "87be41b8c9cc2850830f4d8028c1fe1bd9f96284"} diff --git a/wasm_for_tests/wasm_source/Makefile b/wasm_for_tests/wasm_source/Makefile index 142199dbc9..38ed4a890f 100644 --- a/wasm_for_tests/wasm_source/Makefile +++ b/wasm_for_tests/wasm_source/Makefile @@ -46,7 +46,7 @@ fmt-check: # Build a selected wasm $(wasms): %: - $(cargo) build --release --target wasm32-unknown-unknown --features $@ && \ + $(cargo) build --release --target wasm32-unknown-unknown --target-dir 'target' --features $@ && \ cp "./target/wasm32-unknown-unknown/release/namada_wasm_for_tests.wasm" ../$@.wasm # `cargo check` one of the wasms, e.g. `make check_tx_no_op` diff --git a/wasm_for_tests/wasm_source/release.toml b/wasm_for_tests/wasm_source/release.toml new file mode 100644 index 0000000000..dbacf2cec3 --- /dev/null +++ b/wasm_for_tests/wasm_source/release.toml @@ -0,0 +1,7 @@ +allow-branch = ["main", "maint-*"] +consolidate-commits = true +pre-release-commit-message = "fixup! Namada {{version}}" +publish = false +push = false +shared-version = true +tag = false diff --git a/wasm_for_tests/wasm_source/src/lib.rs b/wasm_for_tests/wasm_source/src/lib.rs index 1062d1ce43..dc90524461 100644 --- a/wasm_for_tests/wasm_source/src/lib.rs +++ b/wasm_for_tests/wasm_source/src/lib.rs @@ -1,66 +1,67 @@ /// A tx that doesn't do anything. #[cfg(feature = "tx_no_op")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; #[transaction] - fn apply_tx(_tx_data: Vec) {} + fn apply_tx(_ctx: &mut Ctx, _tx_data: Vec) -> TxResult { + Ok(()) + } } /// A tx that allocates a memory of size given from the `tx_data: usize`. 
#[cfg(feature = "tx_memory_limit")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; #[transaction] - fn apply_tx(tx_data: Vec) { + fn apply_tx(_ctx: &mut Ctx, tx_data: Vec) -> TxResult { let len = usize::try_from_slice(&tx_data[..]).unwrap(); log_string(format!("allocate len {}", len)); let bytes: Vec = vec![6_u8; len]; // use the variable to prevent it from compiler optimizing it away log_string(format!("{:?}", &bytes[..8])); + Ok(()) } } /// A tx to be used as proposal_code #[cfg(feature = "tx_proposal_code")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; #[transaction] - fn apply_tx(_tx_data: Vec) { + fn apply_tx(ctx: &mut Ctx, _tx_data: Vec) -> TxResult { // governance - let target_key = storage::get_min_proposal_grace_epoch_key(); - write(&target_key.to_string(), 9_u64); - - // treasury - let target_key = treasury_storage::get_max_transferable_fund_key(); - write(&target_key.to_string(), token::Amount::whole(20_000)); + let target_key = gov_storage::get_min_proposal_grace_epoch_key(); + ctx.write(&target_key, 9_u64)?; // parameters let target_key = parameters_storage::get_tx_whitelist_storage_key(); - write(&target_key.to_string(), vec!["hash"]); + ctx.write(&target_key, vec!["hash"])?; + Ok(()) } } /// A tx that attempts to read the given key from storage. #[cfg(feature = "tx_read_storage_key")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; #[transaction] - fn apply_tx(tx_data: Vec) { + fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { // Allocates a memory of size given from the `tx_data (usize)` - let key = Key::try_from_slice(&tx_data[..]).unwrap(); + let key = storage::Key::try_from_slice(&tx_data[..]).unwrap(); log_string(format!("key {}", key)); - let _result: Vec = read(key.to_string()).unwrap(); + let _result: Vec = ctx.read(&key)?.unwrap(); + Ok(()) } } /// A tx that attempts to write arbitrary data to the given key #[cfg(feature = "tx_write_storage_key")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; const TX_NAME: &str = "tx_write"; @@ -81,14 +82,9 @@ pub mod main { } #[transaction] - fn apply_tx(tx_data: Vec) { - let signed = match SignedTxData::try_from_slice(&tx_data[..]) { - Ok(signed) => { - log("got signed data"); - signed - } - Err(error) => fatal("getting signed data", error), - }; + fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; let data = match signed.data { Some(data) => { log(&format!("got data ({} bytes)", data.len())); @@ -100,15 +96,15 @@ pub mod main { }; let key = match String::from_utf8(data) { Ok(key) => { + let key = storage::Key::parse(key).unwrap(); log(&format!("parsed key from data: {}", key)); key } Err(error) => fatal("getting key", error), }; - let val: Option> = read(key.as_str()); + let val: Option = ctx.read(&key)?; match val { Some(val) => { - let val = String::from_utf8(val).unwrap(); log(&format!("preexisting val is {}", val)); } None => { @@ -119,7 +115,8 @@ pub mod main { "attempting to write new value {} to key {}", ARBITRARY_VALUE, key )); - write(key.as_str(), ARBITRARY_VALUE); + ctx.write(&key, ARBITRARY_VALUE)?; + Ok(()) } } @@ -128,11 +125,12 @@ pub mod main { /// token's VP. 
#[cfg(feature = "tx_mint_tokens")] pub mod main { - use namada_vm_env::tx_prelude::*; + use namada_tx_prelude::*; #[transaction] - fn apply_tx(tx_data: Vec) { - let signed = SignedTxData::try_from_slice(&tx_data[..]).unwrap(); + fn apply_tx(ctx: &mut Ctx, tx_data: Vec) -> TxResult { + let signed = SignedTxData::try_from_slice(&tx_data[..]) + .wrap_err("failed to decode SignedTxData")?; let transfer = token::Transfer::try_from_slice(&signed.data.unwrap()[..]).unwrap(); log_string(format!("apply_tx called to mint tokens: {:#?}", transfer)); @@ -145,41 +143,44 @@ pub mod main { } = transfer; let target_key = token::balance_key(&token, &target); let mut target_bal: token::Amount = - read(&target_key.to_string()).unwrap_or_default(); + ctx.read(&target_key)?.unwrap_or_default(); target_bal.receive(&amount); - write(&target_key.to_string(), target_bal); + ctx.write(&target_key, target_bal)?; + Ok(()) } } /// A VP that always returns `true`. #[cfg(feature = "vp_always_true")] pub mod main { - use namada_vm_env::vp_prelude::*; + use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + _ctx: &Ctx, _tx_data: Vec, _addr: Address, _keys_changed: BTreeSet, _verifiers: BTreeSet
, - ) -> bool { - true + ) -> VpResult { + accept() } } /// A VP that always returns `false`. #[cfg(feature = "vp_always_false")] pub mod main { - use namada_vm_env::vp_prelude::*; + use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + _ctx: &Ctx, _tx_data: Vec, _addr: Address, _keys_changed: BTreeSet, _verifiers: BTreeSet
, - ) -> bool { - false + ) -> VpResult { + reject() } } @@ -187,19 +188,20 @@ pub mod main { /// of `eval`. #[cfg(feature = "vp_eval")] pub mod main { - use namada_vm_env::vp_prelude::*; + use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec, _addr: Address, _keys_changed: BTreeSet, _verifiers: BTreeSet
, - ) -> bool { + ) -> VpResult { use validity_predicate::EvalVp; let EvalVp { vp_code, input }: EvalVp = EvalVp::try_from_slice(&tx_data[..]).unwrap(); - eval(vp_code, input) + ctx.eval(vp_code, input) } } @@ -207,21 +209,22 @@ pub mod main { // Returns `true`, if the allocation is within memory limits. #[cfg(feature = "vp_memory_limit")] pub mod main { - use namada_vm_env::vp_prelude::*; + use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + _ctx: &Ctx, tx_data: Vec, _addr: Address, _keys_changed: BTreeSet, _verifiers: BTreeSet
, - ) -> bool { + ) -> VpResult { let len = usize::try_from_slice(&tx_data[..]).unwrap(); log_string(format!("allocate len {}", len)); let bytes: Vec = vec![6_u8; len]; // use the variable to prevent it from compiler optimizing it away log_string(format!("{:?}", &bytes[..8])); - true + accept() } } @@ -229,19 +232,20 @@ pub mod main { /// execution). Returns `true`, if the allocation is within memory limits. #[cfg(feature = "vp_read_storage_key")] pub mod main { - use namada_vm_env::vp_prelude::*; + use namada_vp_prelude::*; #[validity_predicate] fn validate_tx( + ctx: &Ctx, tx_data: Vec, _addr: Address, _keys_changed: BTreeSet, _verifiers: BTreeSet
, - ) -> bool { + ) -> VpResult { // Allocates a memory of size given from the `tx_data (usize)` - let key = Key::try_from_slice(&tx_data[..]).unwrap(); + let key = storage::Key::try_from_slice(&tx_data[..]).unwrap(); log_string(format!("key {}", key)); - let _result: Vec = read_pre(key.to_string()).unwrap(); - true + let _result: Vec = ctx.read_pre(&key)?.unwrap(); + accept() } }