From 786cb31fda9541a64628f9b0f5535f6bfc4ffc9e Mon Sep 17 00:00:00 2001
From: Earle Lowe <30607889+emlowe@users.noreply.github.com>
Date: Mon, 17 Jun 2024 15:40:12 -0700
Subject: [PATCH 01/77] updated gui to electron 30.0.9 (#18194)

* update GUI pin to electron 30.0.9 55cdd0351eec1e7da8977d31b09c0e4d5a1f7a79

* Update GUI pin to merged commit hash for 30.0.9

---
 chia-blockchain-gui | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/chia-blockchain-gui b/chia-blockchain-gui
index 5d6442180f9d..4e2c293b954a 160000
--- a/chia-blockchain-gui
+++ b/chia-blockchain-gui
@@ -1 +1 @@
-Subproject commit 5d6442180f9ddfda756c0c89de5ed56f2f225f06
+Subproject commit 4e2c293b954a517a9d91de66b5fa8fc248c7b889

From 4ce568b129899591bdb26e5fcba7d16785054646 Mon Sep 17 00:00:00 2001
From: Earle Lowe <30607889+emlowe@users.noreply.github.com>
Date: Tue, 18 Jun 2024 12:26:50 -0700
Subject: [PATCH 02/77] Update Changelog for 2.4.0 release (#18200)

* 2.4.0 changelog

* minor update

* minor updates

---
 CHANGELOG.md | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 65 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1d5c16131421..7bc7204696ee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,71 @@
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
 for setuptools_scm/PEP 440 reasons.
+## 2.4.0 Chia blockchain 2024-06-20
+
+## What's Changed
+### Added
+* Soft fork 5: disallow infinity G1 points as public keys in AGG_SIG_* conditions
+* DL: Added support for updating multiple datastores in a single batch update
+* Add unfinished block to state change event (thanks @felixbrucker)
+* CHIP-0026 Mempool Updates
+* Preliminary support for observer mode. Ability to add public keys via CLI
+
+### Changed
+* Remove `tx_records` from `dl_update_multiple` RPC (breaking change)
+* DL: optimizations for autoinsert and upsert
+* Increase farmer fill rate to 70%
+* Use Rust types for `RecentChainData`, `ProofBlockHeader` and `WeightProof`
+* Use Rust version of `MerkleSet`
+* Remove unused files
+* Make a couple of DAOWallet methods return lists of transaction records
+* Simplify `MerkleSet` by making it immutable
+* Add ability to profile the farmer process
+* Remove unused current_inner from PoolState
+* Optimize `launcher_id_to_p2_puzzle_hash()`
+* Add genesis challenge to `get_network_info` RPC
+* Puzzle hash optimizations
+* Optimize key derivation in the wallet
+* Add optional trusted CIDR list
+* Make `BLSCache` a proper class
+* Split capabilities for each service
+* Use kv compressed in DL batch update
+* Updated GUI to `electron 30.0.9`
+* Bump `chia_rs` to `0.9.0` and update G1Element handling
+* Bump `boto3` to `1.34.114`
+* Bump `chiabip158` to `1.5.1`
+* Bump `clvm` to `0.9.10`
+* Bump `aiohttp` to `3.9.4`
+* Bump `filelock` to `3.14.0`
+* Bump `importlib-resources` to `6.4.0`
+* Bump `keyring` to `25.1.0`
+* Bump `dnspython` to `2.6.1`
+* Bump `typing-extensions` to `4.11.0`
+* Bump `packaging` to `24.0`
+* Bump `hsms` to `0.3.1`
+
+### Fixed
+* Add bytes type to `DerivationRecord.pubkey`
+* Do not return unexpected coins from `get_coin_state`
+* Fix memo plotid
+* Filter out duplicate coins returned by `RequestPuzzleState`
+* Fix confusion between prompt and don't prompt in the plotnft CLI
+* Drop deprecated `authentication_public_key` from pool config
+* Fixed some typos (thanks @wersfeds)
+* Make sure to use no more than 61 CPUs on Windows (fixes #17967)
+* Handle reorgs in data layer wallet
+* Modify `VerifiedCredential.launch` to handle multiple source coins
+* Add tx_config and extra_conditions to DID creation endpoint
+* DL: Return exception and error from `get_kv_diff` when either of the hashes has no data
+* Link trade cancellations with announcements
+* Add coin id index to coin state batching
+* Remove Homebrew rpaths from `_ssl.cpython.so` on macOS during build (fixes #18099)
+* Aligned `lerna` and `nx` versions
+* Set permissions in DEB `postinst.sh` for chrome-sandbox (fixes #17956)
+
+### Deprecated
+macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release to support macOS 11.
+
 ## 2.3.1 Chia blockchain 2024-04-28
 
 ### Added

From 750b6ded1a8926c0013284d295528f5a23642624 Mon Sep 17 00:00:00 2001
From: Amine Khaldi
Date: Thu, 20 Jun 2024 14:09:47 +0100
Subject: [PATCH 03/77] CHIA-617 Annotate test_mempool.py (#18136)

Annotate test_mempool.py.
--- chia/_tests/core/mempool/test_mempool.py | 636 +++++++++++++++-------- mypy-exclusions.txt | 1 - 2 files changed, 430 insertions(+), 207 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index b87f51a27053..7f1b78263259 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -35,8 +35,10 @@ from chia.protocols import full_node_protocol, wallet_protocol from chia.protocols.wallet_protocol import TransactionAck from chia.server.outbound_message import Message +from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection -from chia.simulator.block_tools import test_constants +from chia.simulator.block_tools import BlockTools, test_constants +from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol from chia.simulator.wallet_tools import WalletTool from chia.types.blockchain_format.coin import Coin @@ -49,6 +51,7 @@ from chia.types.condition_with_args import ConditionWithArgs from chia.types.eligible_coin_spends import UnspentLineageInfo, run_for_cost from chia.types.fee_rate import FeeRate +from chia.types.full_block import FullBlock from chia.types.generator_types import BlockGenerator from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.types.mempool_item import MempoolItem @@ -76,17 +79,17 @@ def new_mi(mi: MempoolInfo, max_mempool_cost: int, min_replace_fee_per_cost: int @pytest.fixture(scope="module") -def wallet_a(bt): +def wallet_a(bt: BlockTools) -> WalletTool: return bt.get_pool_wallet_tool() def generate_test_spend_bundle( wallet: WalletTool, coin: Coin, - condition_dic: Dict[ConditionOpcode, List[ConditionWithArgs]] = None, + condition_dic: Optional[Dict[ConditionOpcode, List[ConditionWithArgs]]] = None, fee: uint64 = uint64(0), amount: uint64 = uint64(1000), - new_puzzle_hash=BURN_PUZZLE_HASH, + new_puzzle_hash: bytes32 = BURN_PUZZLE_HASH, ) -> SpendBundle: if condition_dic is None: condition_dic = {} @@ -95,7 +98,7 @@ def generate_test_spend_bundle( return transaction -def make_item(idx: int, cost: uint64 = uint64(80), assert_height=100) -> MempoolItem: +def make_item(idx: int, cost: uint64 = uint64(80), assert_height: uint32 = uint32(100)) -> MempoolItem: spend_bundle_name = bytes32([idx] * 32) return MempoolItem( SpendBundle([], G2Element()), @@ -108,7 +111,7 @@ def make_item(idx: int, cost: uint64 = uint64(80), assert_height=100) -> Mempool class TestConflictTxCache: - def test_recall(self): + def test_recall(self) -> None: c = ConflictTxCache(100) item = make_item(1) c.add(item) @@ -116,7 +119,7 @@ def test_recall(self): tx = c.drain() assert tx == {item.spend_bundle_name: item} - def test_fifo_limit(self): + def test_fifo_limit(self) -> None: c = ConflictTxCache(200) # each item has cost 80 items = [make_item(i) for i in range(1, 4)] @@ -127,7 +130,7 @@ def test_fifo_limit(self): tx = c.drain() assert tx == {items[-2].spend_bundle_name: items[-2], items[-1].spend_bundle_name: items[-1]} - def test_item_limit(self): + def test_item_limit(self) -> None: c = ConflictTxCache(1000000, 2) # each item has cost 80 items = [make_item(i) for i in range(1, 4)] @@ -138,7 +141,7 @@ def test_item_limit(self): tx = c.drain() assert tx == {items[-2].spend_bundle_name: items[-2], items[-1].spend_bundle_name: items[-1]} - def test_drain(self): + def test_drain(self) -> None: c = ConflictTxCache(100) item = make_item(1) c.add(item) @@ -149,7 
+152,7 @@ def test_drain(self): tx = c.drain() assert tx == {} - def test_cost(self): + def test_cost(self) -> None: c = ConflictTxCache(200) assert c.cost() == 0 item1 = make_item(1) @@ -179,15 +182,15 @@ def test_cost(self): class TestPendingTxCache: - def test_recall(self): + def test_recall(self) -> None: c = PendingTxCache(100) item = make_item(1) c.add(item) assert c.get(item.name) == item - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {item.spend_bundle_name: item} - def test_fifo_limit(self): + def test_fifo_limit(self) -> None: c = PendingTxCache(200) # each item has cost 80 items = [make_item(i) for i in range(1, 4)] @@ -195,24 +198,24 @@ def test_fifo_limit(self): c.add(i) # the max cost is 200, only two transactions will fit # the eviction is FIFO because all items have the same assert_height - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {items[-2].spend_bundle_name: items[-2], items[-1].spend_bundle_name: items[-1]} - def test_add_eviction(self): + def test_add_eviction(self) -> None: c = PendingTxCache(160) - item = make_item(1, assert_height=100) + item = make_item(1) c.add(item) for i in range(3): - item = make_item(i + 1, assert_height=50) + item = make_item(i + 1, assert_height=uint32(50)) c.add(item) - txs = c.drain(161) + txs = c.drain(uint32(161)) assert len(txs) == 2 for tx in txs.values(): assert tx.assert_height == 50 - def test_item_limit(self): + def test_item_limit(self) -> None: c = PendingTxCache(1000000, 2) # each item has cost 80 items = [make_item(i) for i in range(1, 4)] @@ -220,21 +223,21 @@ def test_item_limit(self): c.add(i) # the max size is 2, only two transactions will fit # the eviction is FIFO because all items have the same assert_height - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {items[-2].spend_bundle_name: items[-2], items[-1].spend_bundle_name: items[-1]} - def test_drain(self): + def test_drain(self) -> None: c = PendingTxCache(100) item = make_item(1) c.add(item) - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {item.spend_bundle_name: item} # drain will clear the cache, so a second call will be empty - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {} - def test_cost(self): + def test_cost(self) -> None: c = PendingTxCache(200) assert c.cost() == 0 item1 = make_item(1) @@ -251,7 +254,7 @@ def test_cost(self): c.add(item3) assert c.cost() == 160 - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {item2.spend_bundle_name: item2, item3.spend_bundle_name: item3} assert c.cost() == 0 @@ -259,22 +262,22 @@ def test_cost(self): c.add(item4) assert c.cost() == 80 - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {item4.spend_bundle_name: item4} - def test_drain_height(self): + def test_drain_height(self) -> None: c = PendingTxCache(20000, 1000) # each item has cost 80 # heights are 100-109 - items = [make_item(i, 80, 100 + i) for i in range(10)] + items = [make_item(i, assert_height=uint32(100 + i)) for i in range(10)] for i in items: c.add(i) - tx = c.drain(101) + tx = c.drain(uint32(101)) assert tx == {items[0].spend_bundle_name: items[0]} - tx = c.drain(105) + tx = c.drain(uint32(105)) assert tx == { items[1].spend_bundle_name: items[1], items[2].spend_bundle_name: items[2], @@ -282,10 +285,10 @@ def test_drain_height(self): items[4].spend_bundle_name: items[4], } - tx = c.drain(105) + tx = c.drain(uint32(105)) assert tx == {} - tx = c.drain(110) + tx = c.drain(uint32(110)) assert tx == { items[5].spend_bundle_name: items[5], items[6].spend_bundle_name: 
items[6], @@ -297,7 +300,9 @@ def test_drain_height(self): class TestMempool: @pytest.mark.anyio - async def test_basic_mempool(self, one_node_one_block, wallet_a): + async def test_basic_mempool( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -340,7 +345,7 @@ async def respond_transaction( return ret -async def next_block(full_node_1, wallet_a, bt) -> Coin: +async def next_block(full_node_1: FullNodeSimulator, wallet_a: WalletTool, bt: BlockTools) -> Coin: blocks = await full_node_1.get_all_full_blocks() # we have to farm a new block here, to ensure every test has a unique coin to test spending. # all this could be simplified if the tests did not share a simulation @@ -352,7 +357,7 @@ async def next_block(full_node_1, wallet_a, bt) -> Coin: guarantee_transaction_block=True, farmer_reward_puzzle_hash=reward_ph, pool_reward_puzzle_hash=reward_ph, - genesis_timestamp=10000, + genesis_timestamp=uint64(10_000), time_per_block=10, ) @@ -381,7 +386,12 @@ async def gen_and_send_sb(node: FullNodeAPI, wallet: WalletTool, coin: Coin, fee class TestMempoolManager: @pytest.mark.anyio - async def test_basic_mempool_manager(self, two_nodes_one_block, wallet_a, self_hostname): + async def test_basic_mempool_manager( + self, + two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools], + wallet_a: WalletTool, + self_hostname: str, + ) -> None: full_node_1, full_node_2, server_1, server_2, bt = two_nodes_one_block peer = await connect_and_get_peer(server_1, server_2, self_hostname) @@ -437,7 +447,14 @@ async def test_basic_mempool_manager(self, two_nodes_one_block, wallet_a, self_h (co.ASSERT_SECONDS_ABSOLUTE, 10052, mis.FAILED), ], ) - async def test_ephemeral_timelock(self, one_node_one_block, wallet_a, opcode, lock_value, expected): + async def test_ephemeral_timelock( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + opcode: ConditionOpcode, + lock_value: int, + expected: MempoolInclusionStatus, + ) -> None: def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: conditions = {opcode: [ConditionWithArgs(opcode, [int_to_bytes(lock_value)])]} tx1 = wallet_a.generate_signed_transaction(uint64(1000000), wallet_a.get_new_puzzlehash(), coin_2) @@ -471,7 +488,9 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: # this test makes sure that one spend successfully asserts the announce from # another spend, even though the assert condition is duplicated 100 times @pytest.mark.anyio - async def test_coin_announcement_duplicate_consumed(self, one_node_one_block, wallet_a): + async def test_coin_announcement_duplicate_consumed( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -495,7 +514,9 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: # this test makes sure that one spend successfully asserts the announce from # another spend, even though the create announcement is duplicated 100 times @pytest.mark.anyio - async def test_coin_duplicate_announcement_consumed(self, one_node_one_block, wallet_a): + async def test_coin_duplicate_announcement_consumed( + 
self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -517,7 +538,12 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_double_spend(self, two_nodes_one_block, wallet_a, self_hostname): + async def test_double_spend( + self, + two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools], + wallet_a: WalletTool, + self_hostname: str, + ) -> None: reward_ph = wallet_a.get_new_puzzlehash() full_node_1, full_node_2, server_1, server_2, bt = two_nodes_one_block blocks = await full_node_1.get_all_full_blocks() @@ -560,14 +586,16 @@ async def test_double_spend(self, two_nodes_one_block, wallet_a, self_hostname): assert sb2 is None assert status == MempoolInclusionStatus.PENDING - def assert_sb_in_pool(self, node, sb): + def assert_sb_in_pool(self, node: FullNodeSimulator, sb: SpendBundle) -> None: assert sb == node.full_node.mempool_manager.get_spendbundle(sb.name()) - def assert_sb_not_in_pool(self, node, sb): + def assert_sb_not_in_pool(self, node: FullNodeSimulator, sb: SpendBundle) -> None: assert node.full_node.mempool_manager.get_spendbundle(sb.name()) is None @pytest.mark.anyio - async def test_double_spend_with_higher_fee(self, one_node_one_block, wallet_a): + async def test_double_spend_with_higher_fee( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, _, bt = one_node_one_block blocks = await full_node_1.get_all_full_blocks() start_height = blocks[-1].height if len(blocks) > 0 else -1 @@ -606,7 +634,7 @@ async def test_double_spend_with_higher_fee(self, one_node_one_block, wallet_a): invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb2 = generate_test_spend_bundle(wallet_a, coin2, fee=MEMPOOL_MIN_FEE_INCREASE) - sb12 = SpendBundle.aggregate((sb2, sb1_3)) + sb12 = SpendBundle.aggregate([sb2, sb1_3]) await send_sb(full_node_1, sb12) # Aggregated spendbundle sb12 replaces sb1_3 since it spends a superset @@ -616,7 +644,7 @@ async def test_double_spend_with_higher_fee(self, one_node_one_block, wallet_a): invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb3 = generate_test_spend_bundle(wallet_a, coin3, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2)) - sb23 = SpendBundle.aggregate((sb2, sb3)) + sb23 = SpendBundle.aggregate([sb2, sb3]) await send_sb(full_node_1, sb23) # sb23 must not replace existing sb12 as the former does not spend all @@ -631,14 +659,14 @@ async def test_double_spend_with_higher_fee(self, one_node_one_block, wallet_a): invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb4_1 = generate_test_spend_bundle(wallet_a, coin4, fee=MEMPOOL_MIN_FEE_INCREASE) - sb1234_1 = SpendBundle.aggregate((sb12, sb3, sb4_1)) + sb1234_1 = SpendBundle.aggregate([sb12, sb3, sb4_1]) await send_sb(full_node_1, sb1234_1) # sb1234_1 should not be in pool as it decreases total fees per cost self.assert_sb_not_in_pool(full_node_1, sb1234_1) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb4_2 = generate_test_spend_bundle(wallet_a, coin4, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2)) - sb1234_2 = SpendBundle.aggregate((sb12, sb3, sb4_2)) + 
sb1234_2 = SpendBundle.aggregate([sb12, sb3, sb4_2]) await send_sb(full_node_1, sb1234_2) # sb1234_2 has a higher fee per cost than its conflicts and should get # into mempool @@ -648,7 +676,9 @@ async def test_double_spend_with_higher_fee(self, one_node_one_block, wallet_a): invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) @pytest.mark.anyio - async def test_invalid_signature(self, one_node_one_block, wallet_a): + async def test_invalid_signature( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: reward_ph = wallet_a.get_new_puzzlehash() full_node_1, server_1, bt = one_node_one_block @@ -681,13 +711,13 @@ async def test_invalid_signature(self, one_node_one_block, wallet_a): async def condition_tester( self, - one_node_one_block, - wallet_a, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, dic: Dict[ConditionOpcode, List[ConditionWithArgs]], fee: int = 0, num_blocks: int = 3, coin: Optional[Coin] = None, - ): + ) -> Tuple[List[FullBlock], SpendBundle, WSChiaConnection, MempoolInclusionStatus, Optional[Err]]: reward_ph = wallet_a.get_new_puzzlehash() full_node_1, server_1, bt = one_node_one_block blocks = await full_node_1.get_all_full_blocks() @@ -724,7 +754,12 @@ async def condition_tester( return blocks, spend_bundle1, dummy_peer, status, err @pytest.mark.anyio - async def condition_tester2(self, node_server_bt, wallet_a, test_fun: Callable[[Coin, Coin], SpendBundle]): + async def condition_tester2( + self, + node_server_bt: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + test_fun: Callable[[Coin, Coin], SpendBundle], + ) -> Tuple[List[FullBlock], SpendBundle, MempoolInclusionStatus, Optional[Err]]: reward_ph = wallet_a.get_new_puzzlehash() full_node_1, server_1, bt = node_server_bt blocks = await full_node_1.get_all_full_blocks() @@ -761,7 +796,9 @@ async def condition_tester2(self, node_server_bt, wallet_a, test_fun: Callable[[ return blocks, bundle, status, err @pytest.mark.anyio - async def test_invalid_block_index(self, one_node_one_block, wallet_a): + async def test_invalid_block_index( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block blocks = await full_node_1.get_all_full_blocks() start_height = blocks[-1].height @@ -778,7 +815,9 @@ async def test_invalid_block_index(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.PENDING @pytest.mark.anyio - async def test_block_index_missing_arg(self, one_node_one_block, wallet_a): + async def test_block_index_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, []) dic = {ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: [cvp]} @@ -790,7 +829,9 @@ async def test_block_index_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_correct_block_index(self, one_node_one_block, wallet_a): + async def test_correct_block_index( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(1)]) dic = {ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: 
[cvp]} @@ -801,7 +842,9 @@ async def test_correct_block_index(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_block_index_garbage(self, one_node_one_block, wallet_a): + async def test_block_index_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block # garbage at the end of the argument list is ignored in consensus mode, # but not in mempool-mode @@ -814,7 +857,9 @@ async def test_block_index_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_negative_block_index(self, one_node_one_block, wallet_a): + async def test_negative_block_index( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(-1)]) dic = {ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: [cvp]} @@ -825,7 +870,9 @@ async def test_negative_block_index(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_invalid_block_age(self, one_node_one_block, wallet_a): + async def test_invalid_block_age( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(5)]) dic = {cvp.opcode: [cvp]} @@ -837,7 +884,9 @@ async def test_invalid_block_age(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.PENDING @pytest.mark.anyio - async def test_block_age_missing_arg(self, one_node_one_block, wallet_a): + async def test_block_age_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, []) dic = {cvp.opcode: [cvp]} @@ -849,7 +898,9 @@ async def test_block_age_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_correct_block_age(self, one_node_one_block, wallet_a): + async def test_correct_block_age( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(1)]) dic = {cvp.opcode: [cvp]} @@ -863,7 +914,9 @@ async def test_correct_block_age(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_block_age_garbage(self, one_node_one_block, wallet_a): + async def test_block_age_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block # garbage at the end of the argument list is ignored in consensus mode, # but not in mempool mode @@ -879,7 +932,9 @@ async def test_block_age_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_negative_block_age(self, one_node_one_block, wallet_a): + async def test_negative_block_age( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, 
BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(-1)]) dic = {cvp.opcode: [cvp]} @@ -893,7 +948,9 @@ async def test_negative_block_age(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_correct_my_id(self, one_node_one_block, wallet_a): + async def test_correct_my_id( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -911,7 +968,9 @@ async def test_correct_my_id(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_my_id_garbage(self, one_node_one_block, wallet_a): + async def test_my_id_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -931,7 +990,9 @@ async def test_my_id_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_my_id(self, one_node_one_block, wallet_a): + async def test_invalid_my_id( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -950,7 +1011,9 @@ async def test_invalid_my_id(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_my_id_missing_arg(self, one_node_one_block, wallet_a): + async def test_my_id_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, []) dic = {cvp.opcode: [cvp]} @@ -962,47 +1025,63 @@ async def test_my_id_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_time_exceeds(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block + async def test_assert_time_exceeds( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + full_node_1, _, _ = one_node_one_block + blockchain_peak = full_node_1.full_node.blockchain.get_peak() + assert blockchain_peak is not None + assert blockchain_peak.timestamp is not None # 5 seconds should be before the next block - time_now = full_node_1.full_node.blockchain.get_peak().timestamp + 5 + time_now = blockchain_peak.timestamp + 5 cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(time_now)]) dic = {cvp.opcode: [cvp]} - blocks, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) + _, spend_bundle1, _, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) assert err is None assert sb1 == spend_bundle1 assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_assert_time_fail(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - time_now = 
full_node_1.full_node.blockchain.get_peak().timestamp + 1000 + async def test_assert_time_fail( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + full_node_1, _, _ = one_node_one_block + blockchain_peak = full_node_1.full_node.blockchain.get_peak() + assert blockchain_peak is not None + assert blockchain_peak.timestamp is not None + time_now = blockchain_peak.timestamp + 1000 cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(time_now)]) dic = {cvp.opcode: [cvp]} - blocks, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) + _, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) assert err == Err.ASSERT_SECONDS_ABSOLUTE_FAILED assert sb1 is None assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_height_pending(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - current_height = full_node_1.full_node.blockchain.get_peak().height + async def test_assert_height_pending( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + full_node_1, _, _ = one_node_one_block + blockchain_peak = full_node_1.full_node.blockchain.get_peak() + assert blockchain_peak is not None + current_height = blockchain_peak.height cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(current_height + 4)]) dic = {cvp.opcode: [cvp]} - blocks, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) + _, spend_bundle1, _, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) assert err == Err.ASSERT_HEIGHT_ABSOLUTE_FAILED assert sb1 is None assert status == MempoolInclusionStatus.PENDING @pytest.mark.anyio - async def test_assert_time_negative(self, one_node_one_block, wallet_a): + async def test_assert_time_negative( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block time_now = -1 @@ -1015,7 +1094,9 @@ async def test_assert_time_negative(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_assert_time_missing_arg(self, one_node_one_block, wallet_a): + async def test_assert_time_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, []) @@ -1027,22 +1108,29 @@ async def test_assert_time_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_time_garbage(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - time_now = full_node_1.full_node.blockchain.get_peak().timestamp + 5 + async def test_assert_time_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + full_node_1, _, _ = one_node_one_block + blockchain_peak = full_node_1.full_node.blockchain.get_peak() + assert blockchain_peak is not None + assert blockchain_peak.timestamp is 
not None + time_now = blockchain_peak.timestamp + 5 # garbage at the end of the argument list is ignored in consensus mode, # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(time_now), b"garbage"]) dic = {cvp.opcode: [cvp]} - blocks, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) + _, spend_bundle1, _, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) assert err is Err.INVALID_CONDITION assert sb1 is None assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_time_relative_exceeds(self, one_node_one_block, wallet_a): + async def test_assert_time_relative_exceeds( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block time_relative = 3 @@ -1068,7 +1156,9 @@ async def test_assert_time_relative_exceeds(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_assert_time_relative_garbage(self, one_node_one_block, wallet_a): + async def test_assert_time_relative_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block time_relative = 0 @@ -1084,7 +1174,9 @@ async def test_assert_time_relative_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_time_relative_missing_arg(self, one_node_one_block, wallet_a): + async def test_assert_time_relative_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_RELATIVE, []) @@ -1097,7 +1189,9 @@ async def test_assert_time_relative_missing_arg(self, one_node_one_block, wallet assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_time_relative_negative(self, one_node_one_block, wallet_a): + async def test_assert_time_relative_negative( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block time_relative = -3 @@ -1112,7 +1206,9 @@ async def test_assert_time_relative_negative(self, one_node_one_block, wallet_a) # ensure one spend can assert a coin announcement from another spend @pytest.mark.anyio - async def test_correct_coin_announcement_consumed(self, one_node_one_block, wallet_a): + async def test_correct_coin_announcement_consumed( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -1145,8 +1241,14 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: ], ) async def test_coin_announcement_garbage( - self, assert_garbage, announce_garbage, expected, expected_included, one_node_one_block, wallet_a - ): + self, + assert_garbage: bool, + announce_garbage: bool, + expected: Optional[Err], + expected_included: MempoolInclusionStatus, + one_node_one_block: 
Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") # garbage at the end is ignored in consensus mode, but not in @@ -1178,10 +1280,10 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: assert mempool_bundle == bundle @pytest.mark.anyio - async def test_coin_announcement_missing_arg(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_coin_announcement_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: # missing arg here cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, []) dic = {cvp.opcode: [cvp]} @@ -1192,17 +1294,18 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) assert err == Err.INVALID_CONDITION assert full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) is None assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_coin_announcement_missing_arg2(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_coin_announcement_missing_arg2( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) dic = {cvp.opcode: [cvp]} @@ -1214,17 +1317,18 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) assert err == Err.INVALID_CONDITION assert full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) is None assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_coin_announcement_too_big(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_coin_announcement_too_big( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=bytes([1] * 10000)) cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -1238,6 +1342,7 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) + full_node_1, _, bt = one_node_one_block blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED @@ -1256,10 +1361,10 @@ def 
test_fun(coin_1: Coin, coin_2: Coin): # ensure an assert coin announcement is rejected if it doesn't match the # create announcement @pytest.mark.anyio - async def test_invalid_coin_announcement_rejected(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_invalid_coin_announcement_rejected( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -1276,7 +1381,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) @@ -1285,10 +1391,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_coin_announcement_rejected_two(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_invalid_coin_announcement_rejected_two( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertCoinAnnouncement(asserted_id=coin_1.name(), asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.msg_calc]) @@ -1303,7 +1409,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED @@ -1311,10 +1418,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_correct_puzzle_announcement(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_correct_puzzle_announcement( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes(0x80)) cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [announce.msg_calc]) @@ -1328,7 +1435,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) 
@@ -1346,11 +1454,15 @@ def test_fun(coin_1: Coin, coin_2: Coin): ], ) async def test_puzzle_announcement_garbage( - self, assert_garbage, announce_garbage, expected, expected_included, one_node_one_block, wallet_a - ): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + self, + assert_garbage: bool, + announce_garbage: bool, + expected: Optional[Err], + expected_included: MempoolInclusionStatus, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes(0x80)) # garbage at the end is ignored in consensus mode, but not in @@ -1371,7 +1483,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) assert err is expected assert status == expected_included @@ -1380,10 +1493,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert mempool_bundle == bundle @pytest.mark.anyio - async def test_puzzle_announcement_missing_arg(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_puzzle_announcement_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: # missing arg here cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, []) dic = {cvp.opcode: [cvp]} @@ -1397,7 +1510,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) @@ -1406,10 +1520,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_puzzle_announcement_missing_arg2(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_puzzle_announcement_missing_arg2( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=b"test") cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [announce.msg_calc]) @@ -1425,8 +1539,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) - + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) assert err == Err.INVALID_CONDITION @@ -1434,10 +1548,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert 
status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_puzzle_announcement_rejected(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_invalid_puzzle_announcement_rejected( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes("test", "utf-8")) cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [announce.msg_calc]) @@ -1454,7 +1568,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) @@ -1463,10 +1578,10 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_puzzle_announcement_rejected_two(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block - - def test_fun(coin_1: Coin, coin_2: Coin): + async def test_invalid_puzzle_announcement_rejected_two( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: + def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes(0x80)) cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [announce.msg_calc]) @@ -1483,7 +1598,8 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - blocks, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) + full_node_1, _, _ = one_node_one_block + _, bundle, status, err = await self.condition_tester2(one_node_one_block, wallet_a, test_fun) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) @@ -1492,7 +1608,9 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_fee_condition(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)]) dic = {cvp.opcode: [cvp]} @@ -1506,7 +1624,9 @@ async def test_assert_fee_condition(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_assert_fee_condition_garbage(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition_garbage( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block # garbage at the end of the arguments is ignored in consensus mode, but # not in mempool mode @@ -1522,7 +1642,9 @@ async def test_assert_fee_condition_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def 
test_assert_fee_condition_missing_arg(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition_missing_arg( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, []) dic = {cvp.opcode: [cvp]} @@ -1536,7 +1658,9 @@ async def test_assert_fee_condition_missing_arg(self, one_node_one_block, wallet assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_assert_fee_condition_negative_fee(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition_negative_fee( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(-1)]) dic = {cvp.opcode: [cvp]} @@ -1554,7 +1678,9 @@ async def test_assert_fee_condition_negative_fee(self, one_node_one_block, walle ) @pytest.mark.anyio - async def test_assert_fee_condition_fee_too_large(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition_fee_too_large( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(2**64)]) dic = {cvp.opcode: [cvp]} @@ -1572,7 +1698,9 @@ async def test_assert_fee_condition_fee_too_large(self, one_node_one_block, wall ) @pytest.mark.anyio - async def test_assert_fee_condition_wrong_fee(self, one_node_one_block, wallet_a): + async def test_assert_fee_condition_wrong_fee( + self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)]) @@ -1585,9 +1713,13 @@ async def test_assert_fee_condition_wrong_fee(self, one_node_one_block, wallet_a assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_stealing_fee(self, two_nodes_one_block, wallet_a): + async def test_stealing_fee( + self, + two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: reward_ph = wallet_a.get_new_puzzlehash() - full_node_1, full_node_2, server_1, server_2, bt = two_nodes_one_block + full_node_1, _, server_1, server_2, bt = two_nodes_one_block blocks = await full_node_1.get_all_full_blocks() start_height = blocks[-1].height blocks = bt.get_consecutive_blocks( @@ -1617,6 +1749,7 @@ async def test_stealing_fee(self, two_nodes_one_block, wallet_a): for coin in blocks[-1].get_included_reward_coins(): if coin.amount == coin_1.amount: coin_2 = coin + assert coin_2 is not None spend_bundle1 = generate_test_spend_bundle(wallet_a, coin_1, dic, uint64(fee)) steal_fee_spendbundle = wallet_a.generate_signed_transaction( @@ -1641,9 +1774,13 @@ async def test_stealing_fee(self, two_nodes_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_double_spend_same_bundle(self, two_nodes_one_block, wallet_a): + async def test_double_spend_same_bundle( + self, + two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: reward_ph = wallet_a.get_new_puzzlehash() - full_node_1, full_node_2, server_1, server_2, bt = 
two_nodes_one_block + full_node_1, _, server_1, server_2, bt = two_nodes_one_block blocks = await full_node_1.get_all_full_blocks() start_height = blocks[-1].height blocks = bt.get_consecutive_blocks( @@ -1675,7 +1812,7 @@ async def test_double_spend_same_bundle(self, two_nodes_one_block, wallet_a): spend_bundle_combined = SpendBundle.aggregate([spend_bundle1, spend_bundle2]) - tx: full_node_protocol.RespondTransaction = full_node_protocol.RespondTransaction(spend_bundle_combined) + tx = full_node_protocol.RespondTransaction(spend_bundle_combined) peer = await connect_and_get_peer(server_1, server_2, bt.config["self_hostname"]) status, err = await respond_transaction(full_node_1, tx, peer, test=True) @@ -1686,7 +1823,11 @@ async def test_double_spend_same_bundle(self, two_nodes_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_agg_sig_condition(self, one_node_one_block, wallet_a): + async def test_agg_sig_condition( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: reward_ph = wallet_a.get_new_puzzlehash() full_node_1, server_1, bt = one_node_one_block blocks = await full_node_1.get_all_full_blocks() @@ -1731,7 +1872,11 @@ async def test_agg_sig_condition(self, one_node_one_block, wallet_a): # assert sb is spend_bundle @pytest.mark.anyio - async def test_correct_my_parent(self, one_node_one_block, wallet_a): + async def test_correct_my_parent( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1750,7 +1895,11 @@ async def test_correct_my_parent(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_my_parent_garbage(self, one_node_one_block, wallet_a): + async def test_my_parent_garbage( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1771,7 +1920,11 @@ async def test_my_parent_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_my_parent_missing_arg(self, one_node_one_block, wallet_a): + async def test_my_parent_missing_arg( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_PARENT_ID, []) dic = {cvp.opcode: [cvp]} @@ -1784,7 +1937,11 @@ async def test_my_parent_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_my_parent(self, one_node_one_block, wallet_a): + async def test_invalid_my_parent( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1804,7 +1961,11 @@ async def test_invalid_my_parent(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_correct_my_puzhash(self, one_node_one_block, wallet_a): + async def test_correct_my_puzhash( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: 
WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1823,7 +1984,11 @@ async def test_correct_my_puzhash(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_my_puzhash_garbage(self, one_node_one_block, wallet_a): + async def test_my_puzhash_garbage( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1843,7 +2008,11 @@ async def test_my_puzhash_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_my_puzhash_missing_arg(self, one_node_one_block, wallet_a): + async def test_my_puzhash_missing_arg( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_PUZZLEHASH, []) dic = {cvp.opcode: [cvp]} @@ -1856,7 +2025,11 @@ async def test_my_puzhash_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_my_puzhash(self, one_node_one_block, wallet_a): + async def test_invalid_my_puzhash( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1875,7 +2048,11 @@ async def test_invalid_my_puzhash(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_correct_my_amount(self, one_node_one_block, wallet_a): + async def test_correct_my_amount( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1894,7 +2071,11 @@ async def test_correct_my_amount(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.anyio - async def test_my_amount_garbage(self, one_node_one_block, wallet_a): + async def test_my_amount_garbage( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block _ = await next_block(full_node_1, wallet_a, bt) @@ -1915,7 +2096,11 @@ async def test_my_amount_garbage(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_my_amount_missing_arg(self, one_node_one_block, wallet_a): + async def test_my_amount_missing_arg( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_AMOUNT, []) dic = {cvp.opcode: [cvp]} @@ -1928,7 +2113,11 @@ async def test_my_amount_missing_arg(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_invalid_my_amount(self, one_node_one_block, wallet_a): + async def test_invalid_my_amount( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = 
ConditionWithArgs(ConditionOpcode.ASSERT_MY_AMOUNT, [int_to_bytes(1000)]) dic = {cvp.opcode: [cvp]} @@ -1941,7 +2130,11 @@ async def test_invalid_my_amount(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_negative_my_amount(self, one_node_one_block, wallet_a): + async def test_negative_my_amount( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: full_node_1, server_1, bt = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_AMOUNT, [int_to_bytes(-1)]) dic = {cvp.opcode: [cvp]} @@ -1954,11 +2147,15 @@ async def test_negative_my_amount(self, one_node_one_block, wallet_a): assert status == MempoolInclusionStatus.FAILED @pytest.mark.anyio - async def test_my_amount_too_large(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block + async def test_my_amount_too_large( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: + full_node_1, _, _ = one_node_one_block cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_AMOUNT, [int_to_bytes(2**64)]) dic = {cvp.opcode: [cvp]} - blocks, spend_bundle1, peer, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) + _, spend_bundle1, _, status, err = await self.condition_tester(one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) @@ -1994,12 +2191,13 @@ def generator_condition_tester( class TestGeneratorConditions: - def test_invalid_condition_args_terminator(self, softfork_height): + def test_invalid_condition_args_terminator(self, softfork_height: uint32) -> None: # note how the condition argument list isn't correctly terminated with a # NIL atom. This is allowed, and all arguments beyond the ones we look # at are ignored, including the termination of the list npc_result = generator_condition_tester("(80 50 . 1)", height=softfork_height) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 assert npc_result.conds.spends[0].seconds_relative == 50 @@ -2012,7 +2210,7 @@ def test_invalid_condition_args_terminator(self, softfork_height): (False, 1, None), ], ) - def test_div(self, mempool, operand, expected, softfork_height): + def test_div(self, mempool: bool, operand: int, expected: Optional[int], softfork_height: uint32) -> None: # op_div is disallowed on negative numbers in the mempool, and after the # softfork npc_result = generator_condition_tester( @@ -2028,7 +2226,7 @@ def test_div(self, mempool, operand, expected, softfork_height): assert npc_result.error == expected - def test_invalid_condition_list_terminator(self, softfork_height): + def test_invalid_condition_list_terminator(self, softfork_height: uint32) -> None: # note how the list of conditions isn't correctly terminated with a # NIL atom. This is a failure npc_result = generator_condition_tester("(80 50) . 3", height=softfork_height) @@ -2043,7 +2241,7 @@ def test_invalid_condition_list_terminator(self, softfork_height): ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - def test_duplicate_height_time_conditions(self, opcode, softfork_height): + def test_duplicate_height_time_conditions(self, opcode: ConditionOpcode, softfork_height: uint32) -> None: # even though the generator outputs multiple conditions, we only # need to return the highest one (i.e. 
most strict) npc_result = generator_condition_tester( @@ -2051,6 +2249,7 @@ def test_duplicate_height_time_conditions(self, opcode, softfork_height): ) print(npc_result) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 assert len(npc_result.conds.spends) == 1 @@ -2070,12 +2269,13 @@ def test_duplicate_height_time_conditions(self, opcode, softfork_height): ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, ], ) - def test_just_announcement(self, opcode, softfork_height): + def test_just_announcement(self, opcode: ConditionOpcode, softfork_height: uint32) -> None: message = "a" * 1024 # announcements are validated on the Rust side and never returned # back. They are either satisified or cause an immediate failure npc_result = generator_condition_tester(f'({opcode.value[0]} "{message}") ' * 50, height=softfork_height) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 # create-announcements and assert-announcements are dropped once # validated @@ -2087,7 +2287,7 @@ def test_just_announcement(self, opcode, softfork_height): ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, ], ) - def test_assert_announcement_fail(self, opcode, softfork_height): + def test_assert_announcement_fail(self, opcode: ConditionOpcode, softfork_height: uint32) -> None: message = "a" * 1024 # announcements are validated on the Rust side and never returned # back. They ar either satisified or cause an immediate failure @@ -2097,17 +2297,18 @@ def test_assert_announcement_fail(self, opcode, softfork_height): print(npc_result) assert npc_result.error == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED.value - def test_multiple_reserve_fee(self, softfork_height): + def test_multiple_reserve_fee(self, softfork_height: uint32) -> None: # RESERVE_FEE cond = 52 # even though the generator outputs 3 conditions, we only need to return one copy # with all the fees accumulated npc_result = generator_condition_tester(f"({cond} 10) " * 3, height=softfork_height) assert npc_result.error is None + assert npc_result.conds is not None assert npc_result.conds.reserve_fee == 30 assert len(npc_result.conds.spends) == 1 - def test_duplicate_outputs(self, softfork_height): + def test_duplicate_outputs(self, softfork_height: uint32) -> None: # CREATE_COIN # creating multiple coins with the same properties (same parent, same # target puzzle hash and same amount) is not allowed. 
That's a consensus @@ -2116,7 +2317,7 @@ def test_duplicate_outputs(self, softfork_height): npc_result = generator_condition_tester(f'(51 "{puzzle_hash}" 10) ' * 2, height=softfork_height) assert npc_result.error == Err.DUPLICATE_OUTPUT.value - def test_create_coin_cost(self, softfork_height): + def test_create_coin_cost(self, softfork_height: uint32) -> None: # CREATE_COIN puzzle_hash = "abababababababababababababababab" @@ -2158,7 +2359,7 @@ def test_create_coin_cost(self, softfork_height): ConditionOpcode.AGG_SIG_ME, ], ) - def test_agg_sig_cost(self, condition, softfork_height): + def test_agg_sig_cost(self, condition: ConditionOpcode, softfork_height: uint32) -> None: pubkey = "0x" + bytes(G1Element.generator()).hex() if softfork_height >= test_constants.HARD_FORK_HEIGHT: @@ -2216,7 +2417,9 @@ def test_agg_sig_cost(self, condition, softfork_height): ) @pytest.mark.parametrize("extra_arg", [' "baz"', ""]) @pytest.mark.parametrize("mempool", [True, False]) - def test_agg_sig_extra_arg(self, condition, extra_arg, mempool, softfork_height): + def test_agg_sig_extra_arg( + self, condition: ConditionOpcode, extra_arg: str, mempool: bool, softfork_height: uint32 + ) -> None: pubkey = "0x" + bytes(G1Element.generator()).hex() new_condition = condition in [ @@ -2263,11 +2466,12 @@ def test_agg_sig_extra_arg(self, condition, extra_arg, mempool, softfork_height) ) assert npc_result.error == expected_error if npc_result.error is None: + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 else: assert npc_result.conds is None - def test_create_coin_different_parent(self, softfork_height): + def test_create_coin_different_parent(self, softfork_height: uint32) -> None: # if the coins we create have different parents, they are never # considered duplicate, even when they have the same puzzle hash and # amount @@ -2282,11 +2486,12 @@ def test_create_coin_different_parent(self, softfork_height): generator, MAX_BLOCK_COST_CLVM, mempool_mode=False, height=softfork_height, constants=test_constants ) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 2 for s in npc_result.conds.spends: assert s.create_coin == [(puzzle_hash.encode("ascii"), 10, None)] - def test_create_coin_different_puzzhash(self, softfork_height): + def test_create_coin_different_puzzhash(self, softfork_height: uint32) -> None: # CREATE_COIN # coins with different puzzle hashes are not considered duplicate puzzle_hash_1 = "abababababababababababababababab" @@ -2295,11 +2500,12 @@ def test_create_coin_different_puzzhash(self, softfork_height): f'(51 "{puzzle_hash_1}" 5) (51 "{puzzle_hash_2}" 5)', height=softfork_height ) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 assert (puzzle_hash_1.encode("ascii"), 5, None) in npc_result.conds.spends[0].create_coin assert (puzzle_hash_2.encode("ascii"), 5, None) in npc_result.conds.spends[0].create_coin - def test_create_coin_different_amounts(self, softfork_height): + def test_create_coin_different_amounts(self, softfork_height: uint32) -> None: # CREATE_COIN # coins with different amounts are not considered duplicate puzzle_hash = "abababababababababababababababab" @@ -2307,17 +2513,19 @@ def test_create_coin_different_amounts(self, softfork_height): f'(51 "{puzzle_hash}" 5) (51 "{puzzle_hash}" 4)', height=softfork_height ) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 coins = 
npc_result.conds.spends[0].create_coin assert (puzzle_hash.encode("ascii"), 5, None) in coins assert (puzzle_hash.encode("ascii"), 4, None) in coins - def test_create_coin_with_hint(self, softfork_height): + def test_create_coin_with_hint(self, softfork_height: uint32) -> None: # CREATE_COIN puzzle_hash_1 = "abababababababababababababababab" hint = "12341234123412341234213421341234" npc_result = generator_condition_tester(f'(51 "{puzzle_hash_1}" 5 ("{hint}"))', height=softfork_height) assert npc_result.error is None + assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 coins = npc_result.conds.spends[0].create_coin assert coins == [(puzzle_hash_1.encode("ascii"), 5, hint.encode("ascii"))] @@ -2334,7 +2542,7 @@ def test_create_coin_with_hint(self, softfork_height): '(0x1ff "foobar")', ], ) - def test_unknown_condition(self, mempool: bool, condition: str, softfork_height: uint32): + def test_unknown_condition(self, mempool: bool, condition: str, softfork_height: uint32) -> None: npc_result = generator_condition_tester(condition, mempool_mode=mempool, height=softfork_height) print(npc_result) if mempool: @@ -2354,7 +2562,7 @@ def test_unknown_condition(self, mempool: bool, condition: str, softfork_height: ) def test_softfork_condition( self, mempool: bool, condition: str, expect_error: Optional[int], softfork_height: uint32 - ): + ) -> None: npc_result = generator_condition_tester(condition, mempool_mode=mempool, height=softfork_height) print(npc_result) @@ -2378,7 +2586,7 @@ def test_softfork_condition( ) def test_message_condition( self, mempool: bool, condition: str, expect_error: Optional[int], softfork_height: uint32 - ): + ) -> None: npc_result = generator_condition_tester(condition, mempool_mode=mempool, height=softfork_height) print(npc_result) @@ -2516,7 +2724,9 @@ class TestMaliciousGenerators: ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - def test_duplicate_large_integer_ladder(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_large_integer_ladder( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: condition = SINGLE_ARG_INT_LADDER_COND.format(opcode=opcode.value[0], num=28, filler="0x00") with benchmark_runner.assert_runtime(seconds=1): @@ -2533,7 +2743,9 @@ def test_duplicate_large_integer_ladder(self, opcode, softfork_height, benchmark ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - def test_duplicate_large_integer(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_large_integer( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: condition = SINGLE_ARG_INT_COND.format(opcode=opcode.value[0], num=280000, val=100, filler="0x00") with benchmark_runner.assert_runtime(seconds=3): @@ -2550,7 +2762,9 @@ def test_duplicate_large_integer(self, opcode, softfork_height, benchmark_runner ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - def test_duplicate_large_integer_substr(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_large_integer_substr( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: condition = SINGLE_ARG_INT_SUBSTR_COND.format(opcode=opcode.value[0], num=280000, val=100, filler="0x00") with benchmark_runner.assert_runtime(seconds=2): @@ -2567,7 +2781,9 @@ def test_duplicate_large_integer_substr(self, opcode, softfork_height, benchmark ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - 
def test_duplicate_large_integer_substr_tail(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_large_integer_substr_tail( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: condition = SINGLE_ARG_INT_SUBSTR_TAIL_COND.format( opcode=opcode.value[0], num=280, val="0xffffffff", filler="0x00" ) @@ -2586,7 +2802,9 @@ def test_duplicate_large_integer_substr_tail(self, opcode, softfork_height, benc ConditionOpcode.ASSERT_SECONDS_RELATIVE, ], ) - def test_duplicate_large_integer_negative(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_large_integer_negative( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: condition = SINGLE_ARG_INT_COND.format(opcode=opcode.value[0], num=280000, val=100, filler="0xff") with benchmark_runner.assert_runtime(seconds=2.75): @@ -2596,7 +2814,7 @@ def test_duplicate_large_integer_negative(self, opcode, softfork_height, benchma assert npc_result.conds is not None assert len(npc_result.conds.spends) == 1 - def test_duplicate_reserve_fee(self, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_reserve_fee(self, softfork_height: uint32, benchmark_runner: BenchmarkRunner) -> None: opcode = ConditionOpcode.RESERVE_FEE condition = SINGLE_ARG_INT_COND.format(opcode=opcode.value[0], num=280000, val=100, filler="0x00") @@ -2605,7 +2823,7 @@ def test_duplicate_reserve_fee(self, softfork_height, benchmark_runner: Benchmar assert npc_result.error == error_for_condition(opcode) - def test_duplicate_reserve_fee_negative(self, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_reserve_fee_negative(self, softfork_height: uint32, benchmark_runner: BenchmarkRunner) -> None: opcode = ConditionOpcode.RESERVE_FEE condition = SINGLE_ARG_INT_COND.format(opcode=opcode.value[0], num=200000, val=100, filler="0xff") @@ -2620,7 +2838,9 @@ def test_duplicate_reserve_fee_negative(self, softfork_height, benchmark_runner: @pytest.mark.parametrize( "opcode", [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT] ) - def test_duplicate_coin_announces(self, opcode, softfork_height, benchmark_runner: BenchmarkRunner): + def test_duplicate_coin_announces( + self, opcode: ConditionOpcode, softfork_height: uint32, benchmark_runner: BenchmarkRunner + ) -> None: # we only allow 1024 create- or assert announcements per spend condition = CREATE_ANNOUNCE_COND.format(opcode=opcode.value[0], num=1024) @@ -2633,7 +2853,7 @@ def test_duplicate_coin_announces(self, opcode, softfork_height, benchmark_runne # coin announcements are not propagated to python, but validated in rust # TODO: optimize clvm to make this run in < 1 second - def test_create_coin_duplicates(self, softfork_height, benchmark_runner: BenchmarkRunner): + def test_create_coin_duplicates(self, softfork_height: uint32, benchmark_runner: BenchmarkRunner) -> None: # CREATE_COIN # this program will emit 6000 identical CREATE_COIN conditions. 
However, # we'll just end up looking at two of them, and fail at the first @@ -2646,7 +2866,7 @@ def test_create_coin_duplicates(self, softfork_height, benchmark_runner: Benchma assert npc_result.error == Err.DUPLICATE_OUTPUT.value assert npc_result.conds is None - def test_many_create_coin(self, softfork_height, benchmark_runner: BenchmarkRunner): + def test_many_create_coin(self, softfork_height: uint32, benchmark_runner: BenchmarkRunner) -> None: # CREATE_COIN # this program will emit many CREATE_COIN conditions, all with different # amounts. @@ -2665,8 +2885,12 @@ def test_many_create_coin(self, softfork_height, benchmark_runner: BenchmarkRunn assert len(spend.create_coin) == 6094 @pytest.mark.anyio - async def test_invalid_coin_spend_coin(self, one_node_one_block, wallet_a): - full_node_1, server_1, bt = one_node_one_block + async def test_invalid_coin_spend_coin( + self, + one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], + wallet_a: WalletTool, + ) -> None: + full_node_1, _, bt = one_node_one_block reward_ph = wallet_a.get_new_puzzlehash() blocks = bt.get_consecutive_blocks( 5, @@ -3004,7 +3228,7 @@ def agg_and_add_sb_returning_cost_info(mempool: Mempool, spend_bundles: List[Spe invariant_check_mempool(mempool) -def test_get_puzzle_and_solution_for_coin_failure(): +def test_get_puzzle_and_solution_for_coin_failure() -> None: with pytest.raises( ValueError, match=f"Failed to get puzzle and solution for coin {TEST_COIN}, error: \\('coin not found', '80'\\)" ): diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt index 2cc15dd243e0..a2d7a2c07e49 100644 --- a/mypy-exclusions.txt +++ b/mypy-exclusions.txt @@ -62,7 +62,6 @@ chia._tests.core.full_node.test_full_node chia._tests.core.full_node.test_node_load chia._tests.core.full_node.test_performance chia._tests.core.full_node.test_transactions -chia._tests.core.mempool.test_mempool chia._tests.core.server.test_dos chia._tests.core.server.test_rate_limits chia._tests.core.ssl.test_ssl From e0c36e8afd181616880f8e53de0be404ce1ad5f6 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 20 Jun 2024 14:10:14 +0100 Subject: [PATCH 04/77] CHIA-767 Simplify invariant_check_mempool (#18176) Simplify invariant_check_mempool --- chia/_tests/util/misc.py | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index df9b5be29b3c..295fd16d4ae1 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -522,19 +522,10 @@ def create_logger(file: TextIO = sys.stdout) -> logging.Logger: def invariant_check_mempool(mempool: Mempool) -> None: - with mempool._db_conn: - cursor = mempool._db_conn.execute("SELECT SUM(cost) FROM tx") - val = cursor.fetchone()[0] - if val is None: - val = 0 - assert mempool._total_cost == val - - with mempool._db_conn: - cursor = mempool._db_conn.execute("SELECT SUM(fee) FROM tx") - val = cursor.fetchone()[0] - if val is None: - val = 0 - assert mempool._total_fee == val + with mempool._db_conn as conn: + cursor = conn.execute("SELECT COALESCE(SUM(cost), 0), COALESCE(SUM(fee), 0) FROM tx") + val = cursor.fetchone() + assert (mempool._total_cost, mempool._total_fee) == val async def wallet_height_at_least(wallet_node: WalletNode, h: uint32) -> bool: From ff3067815b30c91c3db233e8a977ed4a60e0b0ba Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Thu, 20 Jun 2024 07:58:03 -0700 Subject: [PATCH 05/77] (pylint) `==` -> `is` (#18207) `==` -> `is` --- chia/_tests/util/test_action_scope.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/chia/_tests/util/test_action_scope.py b/chia/_tests/util/test_action_scope.py index c8fe9add714b..56ee3c1ec23c 100644 --- a/chia/_tests/util/test_action_scope.py +++ b/chia/_tests/util/test_action_scope.py @@ -29,7 +29,7 @@ async def default_async_callback(interface: StateInterface[TestSideEffects]) -> def test_set_callback() -> None: state_interface = StateInterface(TestSideEffects(), True) state_interface.set_callback(default_async_callback) - assert state_interface._callback == default_async_callback + assert state_interface._callback is default_async_callback state_interface_no_callbacks = StateInterface(TestSideEffects(), False) with pytest.raises(RuntimeError, match="Callback cannot be edited from inside itself"): state_interface_no_callbacks.set_callback(None) From 6ed1fb02bdd30ed2e174a173bfeed9e37a2c56ec Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Fri, 21 Jun 2024 06:59:11 -0700 Subject: [PATCH 06/77] [CHIA-754] Use tx_config in cancel_pending_trades (#18169) Use tx_config in cancel_pending_trades --- chia/wallet/trade_manager.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index 7b92e24b2e14..66266996f05b 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -299,7 +299,9 @@ async def cancel_pending_offers( self.log.error(f"Cannot find wallet for offer {trade.trade_id}, skip cancellation.") continue - new_ph = await wallet.wallet_state_manager.main_wallet.get_new_puzzlehash() + new_ph = await wallet.wallet_state_manager.main_wallet.get_puzzle_hash( + new=(not tx_config.reuse_puzhash) + ) if len(trade_records) > 1 or len(cancellation_coins) > 1: announcement_conditions: Tuple[Condition, ...] = ( From 495d401cf1f1c26e82ec5759c864ed2cdbd590fd Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 21 Jun 2024 14:59:52 +0100 Subject: [PATCH 07/77] CHIA-736 Annotate trade_manager.py (#18138) Annotate trade_manager.py. 
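Most of the annotation work in this patch follows a single pattern: attributes that were previously hinted as `Any` get concrete types (for example `wallet_state_manager: WalletStateManager`), and values whose declared type is a broad wallet union are narrowed with `assert isinstance(...)` immediately before a subtype-specific method is called, so mypy can verify the call. A minimal, self-contained sketch of that narrowing pattern (the `Wallet` and `CATWallet` classes below are illustrative stand-ins, not the real chia classes):

    from dataclasses import dataclass


    @dataclass
    class Wallet:
        # stand-in for the standard wallet type
        wallet_id: int = 1


    @dataclass
    class CATWallet(Wallet):
        # stand-in for a wallet subtype that has an extra method
        asset_id_hex: str = "ab" * 32

        def get_asset_id(self) -> str:
            return self.asset_id_hex


    def asset_id_for(wallet: Wallet) -> str:
        # Without this assert, mypy cannot prove that get_asset_id() exists on
        # the declared type; the isinstance check narrows `wallet` to CATWallet.
        assert isinstance(wallet, CATWallet)
        return wallet.get_asset_id()


    print(asset_id_for(CATWallet()))
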
--- chia/wallet/trade_manager.py | 65 +++++++++++++++++++++++++----------- 1 file changed, 45 insertions(+), 20 deletions(-) diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index 66266996f05b..28928a71cd2d 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -4,7 +4,7 @@ import logging import time from collections import deque -from typing import Any, Deque, Dict, List, Optional, Set, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Deque, Dict, List, Optional, Set, Tuple, Union from typing_extensions import Literal, Never @@ -18,6 +18,7 @@ from chia.util.db_wrapper import DBWrapper2 from chia.util.hash import std_hash from chia.util.ints import uint32, uint64 +from chia.wallet.cat_wallet.cat_wallet import CATWallet from chia.wallet.conditions import ( AssertCoinAnnouncement, Condition, @@ -47,6 +48,10 @@ from chia.wallet.vc_wallet.vc_wallet import VCWallet from chia.wallet.wallet import Wallet from chia.wallet.wallet_coin_record import WalletCoinRecord +from chia.wallet.wallet_protocol import WalletProtocol + +if TYPE_CHECKING: + from chia.wallet.wallet_state_manager import WalletStateManager OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clsp") @@ -88,7 +93,7 @@ class TradeManager: - get_wallet_for_asset_id(asset_id: bytes32) -> """ - wallet_state_manager: Any + wallet_state_manager: WalletStateManager log: logging.Logger trade_store: TradeStore most_recently_deserialized_trade: Optional[Tuple[bytes32, Offer]] @@ -182,6 +187,7 @@ async def coins_of_interest_farmed( # If any of our settlement_payments were spent, this offer was a success! if set(our_addition_ids) == set(coin_state_names): height = coin_states[0].created_height + assert height is not None await self.trade_store.set_status(trade.trade_id, TradeStatus.CONFIRMED, index=height) tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(offer, False) for tx in tx_records: @@ -217,15 +223,11 @@ async def get_locked_coins(self) -> Dict[bytes32, WalletCoinRecord]: # TODO: # - No need to get the coin records here, we are only interested in the coin_id on the call site. - # - The cast here is required for now because TradeManager.wallet_state_manager is hinted as Any. 
- return cast( - Dict[bytes32, WalletCoinRecord], - ( - await self.wallet_state_manager.coin_store.get_coin_records( - coin_id_filter=HashFilter.include(coins_of_interest) - ) - ).coin_id_to_record, - ) + return ( + await self.wallet_state_manager.coin_store.get_coin_records( + coin_id_filter=HashFilter.include(coins_of_interest) + ) + ).coin_id_to_record async def get_all_trades(self) -> List[TradeRecord]: all: List[TradeRecord] = await self.trade_store.get_all_trades() @@ -312,6 +314,7 @@ async def cancel_pending_offers( announcement_conditions = tuple() # This should probably not switch on whether or not we're spending a XCH but it has to for now if wallet.type() == WalletType.STANDARD_WALLET: + assert isinstance(wallet, Wallet) if fee_to_pay > coin.amount: selected_coins: Set[Coin] = await wallet.select_coins( uint64(fee_to_pay - coin.amount), @@ -339,6 +342,7 @@ async def cancel_pending_offers( all_txs.append(dataclasses.replace(tx, spend_bundle=None)) else: # ATTENTION: new_wallets + assert isinstance(wallet, (CATWallet, DataLayerWallet, NFTWallet)) txs = await wallet.generate_signed_transaction( [coin.amount], [new_ph], @@ -486,10 +490,12 @@ async def _create_offer_for_ids( memos: List[bytes] = [] if isinstance(id, int): wallet_id = uint32(id) - wallet = self.wallet_state_manager.wallets[wallet_id] + wallet = self.wallet_state_manager.wallets.get(wallet_id) + assert isinstance(wallet, (CATWallet, Wallet)) p2_ph: bytes32 = await wallet.get_puzzle_hash(new=not tx_config.reuse_puzhash) if wallet.type() != WalletType.STANDARD_WALLET: if callable(getattr(wallet, "get_asset_id", None)): # ATTENTION: new wallets + assert isinstance(wallet, CATWallet) asset_id = bytes32(bytes.fromhex(wallet.get_asset_id())) memos = [p2_ph] else: @@ -511,6 +517,7 @@ async def _create_offer_for_ids( wallet = self.wallet_state_manager.wallets[wallet_id] if wallet.type() != WalletType.STANDARD_WALLET: if callable(getattr(wallet, "get_asset_id", None)): # ATTENTION: new wallets + assert isinstance(wallet, CATWallet) asset_id = bytes32(bytes.fromhex(wallet.get_asset_id())) else: raise ValueError( @@ -519,15 +526,26 @@ async def _create_offer_for_ids( else: asset_id = id wallet = await self.wallet_state_manager.get_wallet_for_asset_id(asset_id.hex()) + assert wallet is not None if not callable(getattr(wallet, "get_coins_to_offer", None)): # ATTENTION: new wallets raise ValueError(f"Cannot offer coins from wallet id {wallet.id()}") # For the XCH wallet also include the fee amount to the coins we use to pay this offer amount_to_select = abs(amount) if wallet.type() == WalletType.STANDARD_WALLET: amount_to_select += fee - coins_to_offer[id] = await wallet.get_coins_to_offer( - asset_id, uint64(amount_to_select), tx_config.coin_selection_config - ) + assert isinstance(wallet, (CATWallet, DataLayerWallet, NFTWallet, Wallet)) + if isinstance(wallet, DataLayerWallet): + assert asset_id is not None + coins_to_offer[id] = await wallet.get_coins_to_offer(launcher_id=asset_id) + elif isinstance(wallet, NFTWallet): + assert asset_id is not None + coins_to_offer[id] = await wallet.get_coins_to_offer(nft_id=asset_id) + else: + coins_to_offer[id] = await wallet.get_coins_to_offer( + asset_id=asset_id, + amount=uint64(amount_to_select), + coin_selection_config=tx_config.coin_selection_config, + ) # Note: if we use check_for_special_offer_making, this is not used. 
elif amount == 0: raise ValueError("You cannot offer nor request 0 amount of something") @@ -536,6 +554,7 @@ async def _create_offer_for_ids( if asset_id is not None and wallet is not None: # if this asset is not XCH if callable(getattr(wallet, "get_puzzle_info", None)): + assert isinstance(wallet, (CATWallet, DataLayerWallet, NFTWallet)) puzzle_driver: PuzzleInfo = await wallet.get_puzzle_info(asset_id) if asset_id in driver_dict and driver_dict[asset_id] != puzzle_driver: # ignore the case if we're an nft transferring the did owner @@ -581,13 +600,15 @@ async def _create_offer_for_ids( for id in sorted(coins_to_offer.keys(), key=lambda id: id != 1): selected_coins = coins_to_offer[id] if isinstance(id, int): - wallet = self.wallet_state_manager.wallets[id] + wallet = self.wallet_state_manager.wallets.get(uint32(id)) else: wallet = await self.wallet_state_manager.get_wallet_for_asset_id(id.hex()) # This should probably not switch on whether or not we're spending XCH but it has to for now + assert wallet is not None if wallet.type() == WalletType.STANDARD_WALLET: + assert isinstance(wallet, Wallet) [tx] = await wallet.generate_signed_transaction( - abs(offer_dict[id]), + uint64(abs(offer_dict[id])), Offer.ph(), tx_config, fee=fee_left_to_pay, @@ -596,6 +617,7 @@ async def _create_offer_for_ids( ) all_transactions.append(tx) elif wallet.type() == WalletType.NFT: + assert isinstance(wallet, NFTWallet) # This is to generate the tx for specific nft assets, i.e. not using # wallet_id as the selector which would select any coins from nft_wallet amounts = [coin.amount for coin in selected_coins] @@ -611,8 +633,9 @@ async def _create_offer_for_ids( all_transactions.extend(txs) else: # ATTENTION: new_wallets + assert isinstance(wallet, (CATWallet, DataLayerWallet)) txs = await wallet.generate_signed_transaction( - [abs(offer_dict[id])], + [uint64(abs(offer_dict[id]))], [Offer.ph()], tx_config, fee=fee_left_to_pay, @@ -642,7 +665,7 @@ async def maybe_create_wallets_for_offer(self, offer: Offer) -> None: if key is None: continue # ATTENTION: new_wallets - exists: Optional[Wallet] = await wsm.get_wallet_for_puzzle_info(offer.driver_dict[key]) + exists = await wsm.get_wallet_for_puzzle_info(offer.driver_dict[key]) if exists is None: await wsm.create_wallet_for_puzzle_info(offer.driver_dict[key]) @@ -700,6 +723,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> if wallet_identifier is not None: if addition.parent_coin_info in settlement_coin_ids: wallet = self.wallet_state_manager.wallets[wallet_identifier.id] + assert isinstance(wallet, (CATWallet, NFTWallet, Wallet)) to_puzzle_hash = await wallet.convert_puzzle_hash(addition.puzzle_hash) # ATTENTION: new wallets txs.append( TransactionRecord( @@ -801,7 +825,8 @@ async def respond_to_offer( for asset_id, amount in arbitrage.items(): if asset_id is None: - wallet = self.wallet_state_manager.main_wallet + wallet: Optional[WalletProtocol[Any]] = self.wallet_state_manager.main_wallet + assert wallet is not None key: Union[bytes32, int] = int(wallet.id()) else: # ATTENTION: new wallets From c5e7a021167aa99ffb5f9991fac35ddeb5fbd4b6 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 21 Jun 2024 15:04:36 +0100 Subject: [PATCH 08/77] CHIA-763 Inline additions_for_npc into MempoolItem's additions (#18172) Inline additions_for_npc into MempoolItem's additions. 
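The loop that turns each spend's `create_coin` conditions into `Coin` additions moves out of `chia/util/generator_tools.py` and into the `MempoolItem.additions` property, as the diff below shows. A rough, self-contained sketch of what that loop computes, using hypothetical stand-in types rather than the real `Coin` and spend-conditions classes:

    from dataclasses import dataclass
    from typing import List, Optional, Tuple


    @dataclass(frozen=True)
    class FakeCoin:
        # stand-in for chia's Coin(parent_coin_info, puzzle_hash, amount)
        parent_coin_info: bytes
        puzzle_hash: bytes
        amount: int


    @dataclass
    class FakeSpend:
        # stand-in for one entry of conds.spends
        coin_id: bytes
        create_coin: List[Tuple[bytes, int, Optional[bytes]]]


    def additions_for(spends: List[FakeSpend]) -> List[FakeCoin]:
        additions: List[FakeCoin] = []
        for spend in spends:
            for puzzle_hash, amount, _hint in spend.create_coin:
                additions.append(FakeCoin(spend.coin_id, puzzle_hash, amount))
        return additions


    spend = FakeSpend(coin_id=b"\x01" * 32, create_coin=[(b"\x02" * 32, 1000, None)])
    print(additions_for([spend]))
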
--- chia/types/mempool_item.py | 9 +++++++-- chia/util/generator_tools.py | 14 -------------- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/chia/types/mempool_item.py b/chia/types/mempool_item.py index d27f0a3f65ca..de2c41feda2e 100644 --- a/chia/types/mempool_item.py +++ b/chia/types/mempool_item.py @@ -8,7 +8,6 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import CoinSpend from chia.types.spend_bundle import SpendBundle -from chia.util.generator_tools import additions_for_npc from chia.util.ints import uint32, uint64 from chia.util.streamable import recurse_jsonify @@ -62,7 +61,13 @@ def cost(self) -> uint64: @property def additions(self) -> List[Coin]: - return additions_for_npc(self.npc_result) + assert self.npc_result.conds is not None + additions: List[Coin] = [] + for spend in self.npc_result.conds.spends: + for puzzle_hash, amount, _ in spend.create_coin: + coin = Coin(spend.coin_id, puzzle_hash, uint64(amount)) + additions.append(coin) + return additions @property def removals(self) -> List[Coin]: diff --git a/chia/util/generator_tools.py b/chia/util/generator_tools.py index 389adc4afd89..25790d8cd308 100644 --- a/chia/util/generator_tools.py +++ b/chia/util/generator_tools.py @@ -4,7 +4,6 @@ from chiabip158 import PyBIP158 -from chia.consensus.cost_calculator import NPCResult from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.full_block import FullBlock @@ -44,19 +43,6 @@ def get_block_header( ) -def additions_for_npc(npc_result: NPCResult) -> List[Coin]: - additions: List[Coin] = [] - - if npc_result.conds is None: - return [] - for spend in npc_result.conds.spends: - for puzzle_hash, amount, _ in spend.create_coin: - coin = Coin(spend.coin_id, puzzle_hash, uint64(amount)) - additions.append(coin) - - return additions - - def tx_removals_and_additions(results: Optional[SpendBundleConditions]) -> Tuple[List[bytes32], List[Coin]]: """ Doesn't return farmer and pool reward. From 1d20bf336bbf2634dea1bced795641b56155f04d Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 21 Jun 2024 15:04:46 +0100 Subject: [PATCH 09/77] CHIA-764 Make mk_item aware of bundle_coin_spends (#18173) Make mk_item aware of bundle_coin_spends. 
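In the test helper, every input coin now also gets a `BundleCoinSpend` entry keyed by its coin id, and the resulting map is passed to `MempoolItem` as `bundle_coin_spends` (the diff below also switches the constructor call to keyword arguments). A small sketch of that map-building step, with hypothetical stand-ins for the real spend types:

    from dataclasses import dataclass, field
    from typing import Dict, List


    @dataclass
    class FakeCoinSpend:
        # stand-in for a CoinSpend built from the coin plus puzzle and solution
        coin_id: bytes


    @dataclass
    class FakeBundleCoinSpend:
        # stand-in for BundleCoinSpend(coin_spend, eligible_for_dedup, ...)
        coin_spend: FakeCoinSpend
        eligible_for_dedup: bool = False
        eligible_for_fast_forward: bool = False
        additions: List[bytes] = field(default_factory=list)


    coin_ids = [b"\x01" * 32, b"\x02" * 32]
    bundle_coin_spends: Dict[bytes, FakeBundleCoinSpend] = {}
    for coin_id in coin_ids:
        bundle_coin_spends[coin_id] = FakeBundleCoinSpend(coin_spend=FakeCoinSpend(coin_id))

    print(len(bundle_coin_spends))
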
--- .../core/mempool/test_mempool_manager.py | 33 ++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index acc2ebda8a29..6af883e87695 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -738,18 +738,29 @@ def mk_item( ) -> MempoolItem: # we don't actually care about the puzzle and solutions for the purpose of # can_replace() - spends = [make_spend(c, SerializedProgram.to(None), SerializedProgram.to(None)) for c in coins] - spend_bundle = SpendBundle(spends, G2Element()) - npc_result = NPCResult(None, make_test_conds(cost=cost, spend_ids=[c.name() for c in coins])) + spend_ids = [] + coin_spends = [] + bundle_coin_spends = {} + for c in coins: + coin_id = c.name() + spend_ids.append(coin_id) + spend = make_spend(c, SerializedProgram.to(None), SerializedProgram.to(None)) + coin_spends.append(spend) + bundle_coin_spends[coin_id] = BundleCoinSpend( + coin_spend=spend, eligible_for_dedup=False, eligible_for_fast_forward=False, additions=[] + ) + spend_bundle = SpendBundle(coin_spends, G2Element()) + npc_result = NPCResult(None, make_test_conds(cost=cost, spend_ids=spend_ids)) return MempoolItem( - spend_bundle, - uint64(fee), - npc_result, - spend_bundle.name(), - uint32(0), - None if assert_height is None else uint32(assert_height), - None if assert_before_height is None else uint32(assert_before_height), - None if assert_before_seconds is None else uint64(assert_before_seconds), + spend_bundle=spend_bundle, + fee=uint64(fee), + npc_result=npc_result, + spend_bundle_name=spend_bundle.name(), + height_added_to_mempool=uint32(0), + assert_height=None if assert_height is None else uint32(assert_height), + assert_before_height=None if assert_before_height is None else uint32(assert_before_height), + assert_before_seconds=None if assert_before_seconds is None else uint64(assert_before_seconds), + bundle_coin_spends=bundle_coin_spends, ) From 63e6d92b349e4e064f08044190bbf20ae0273339 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 21 Jun 2024 16:59:04 +0100 Subject: [PATCH 10/77] Update anchor against GUI's main. --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 4e2c293b954a..79ae09558139 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 4e2c293b954a517a9d91de66b5fa8fc248c7b889 +Subproject commit 79ae095581393b7a08a3642f1fbb299be1c6128f From eefca33875c44fa955bfe7fd710eb6d0bddb056d Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 21 Jun 2024 16:32:39 -0400 Subject: [PATCH 11/77] Revert "Revert "Revert "CHIA-414 fixup datalayer benchmark""" (#18223) Revert "Revert "Revert "CHIA-414 fixup datalayer benchmark"" (#18107)" This reverts commit e87d51a99348669d1f82e4b6fd7f7dd4b191cbe4. 
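This revert drops the big_o curve-fitting approach and returns to fixed per-case limits: each `BatchInsertBenchmarkCase` carries a `limit` in seconds and the batch insert is wrapped in `benchmark_runner.assert_runtime(seconds=case.limit)`. A simplified, self-contained sketch of that style of runtime assertion (a stand-in for illustration, not the real `BenchmarkRunner` implementation):

    import time
    from contextlib import contextmanager
    from typing import Iterator


    @contextmanager
    def assert_runtime(seconds: float) -> Iterator[None]:
        # Fail if the wrapped block exceeds the allowed wall-clock time.
        start = time.monotonic()
        yield
        duration = time.monotonic() - start
        assert duration <= seconds, f"took {duration:.3f}s, limit was {seconds:.3f}s"


    with assert_runtime(seconds=2.0):
        sum(range(1_000_000))  # placeholder for data_store.insert_batch(...)
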
--- .../_tests/core/data_layer/test_data_store.py | 142 ++++++++---------- chia/_tests/process_junit.py | 42 +----- chia/_tests/util/misc.py | 28 ---- setup.py | 1 - 4 files changed, 73 insertions(+), 140 deletions(-) diff --git a/chia/_tests/core/data_layer/test_data_store.py b/chia/_tests/core/data_layer/test_data_store.py index f05812bf718d..0e04d36310bf 100644 --- a/chia/_tests/core/data_layer/test_data_store.py +++ b/chia/_tests/core/data_layer/test_data_store.py @@ -14,8 +14,6 @@ import aiohttp import aiosqlite -import big_o -import big_o.complexities import pytest from chia._tests.core.data_layer.util import Example, add_0123_example, add_01234567_example @@ -1517,101 +1515,91 @@ async def test_clear_pending_roots_returns_root( assert cleared_root == pending_root +@dataclass +class BatchInsertBenchmarkCase: + pre: int + count: int + limit: float + marks: Marks = () + + @property + def id(self) -> str: + return f"pre={self.pre},count={self.count}" + + +@dataclass +class BatchesInsertBenchmarkCase: + count: int + batch_count: int + limit: float + marks: Marks = () + + @property + def id(self) -> str: + return f"count={self.count},batch_count={self.batch_count}" + + +@datacases( + BatchInsertBenchmarkCase( + pre=0, + count=100, + limit=2.2, + ), + BatchInsertBenchmarkCase( + pre=1_000, + count=100, + limit=4, + ), + BatchInsertBenchmarkCase( + pre=0, + count=1_000, + limit=30, + ), + BatchInsertBenchmarkCase( + pre=1_000, + count=1_000, + limit=36, + ), + BatchInsertBenchmarkCase( + pre=10_000, + count=25_000, + limit=52, + ), +) @pytest.mark.anyio async def test_benchmark_batch_insert_speed( data_store: DataStore, store_id: bytes32, benchmark_runner: BenchmarkRunner, + case: BatchInsertBenchmarkCase, ) -> None: r = random.Random() r.seed("shadowlands", version=2) - test_size = 100 - max_pre_size = 20_000 - # may not be needed if big_o already considers the effect - # TODO: must be > 0 to avoid an issue with the log class? - lowest_considered_n = 2000 - simplicity_bias_percentage = 10 / 100 - - batch_count, remainder = divmod(max_pre_size, test_size) - assert remainder == 0, "the last batch would be a different size" - changelist = [ { "action": "insert", "key": x.to_bytes(32, byteorder="big", signed=False), "value": bytes(r.getrandbits(8) for _ in range(1200)), } - for x in range(max_pre_size) + for x in range(case.pre + case.count) ] - pre = changelist[:max_pre_size] - - records: Dict[int, float] = {} - - total_inserted = 0 - pre_iter = iter(pre) - with benchmark_runner.print_runtime( - label="overall", - clock=time.monotonic, - ): - while True: - pre_batch = list(itertools.islice(pre_iter, test_size)) - if len(pre_batch) == 0: - break - - with benchmark_runner.print_runtime( - label="count", - clock=time.monotonic, - ) as f: - await data_store.insert_batch( - store_id=store_id, - changelist=pre_batch, - # TODO: does this mess up test accuracy? 
- status=Status.COMMITTED, - ) + pre = changelist[: case.pre] + batch = changelist[case.pre : case.pre + case.count] - records[total_inserted] = f.result().duration - total_inserted += len(pre_batch) - - considered_durations = {n: duration for n, duration in records.items() if n >= lowest_considered_n} - ns = list(considered_durations.keys()) - durations = list(considered_durations.values()) - best_class, fitted = big_o.infer_big_o_class(ns=ns, time=durations) - simplicity_bias = simplicity_bias_percentage * fitted[best_class] - best_class, fitted = big_o.infer_big_o_class(ns=ns, time=durations, simplicity_bias=simplicity_bias) - - print(f"allowed simplicity bias: {simplicity_bias}") - print(big_o.reports.big_o_report(best=best_class, others=fitted)) - - assert isinstance( - best_class, (big_o.complexities.Constant, big_o.complexities.Linear) - ), f"must be constant or linear: {best_class}" - - coefficient_maximums = [0.65, 0.000_25, *(10**-n for n in range(5, 100))] - - coefficients = best_class.coefficients() - paired = list(zip(coefficients, coefficient_maximums)) - assert len(paired) == len(coefficients) - for index, [actual, maximum] in enumerate(paired): - benchmark_runner.record_value( - value=actual, - limit=maximum, - label=f"{type(best_class).__name__} coefficient {index}", + if case.pre > 0: + await data_store.insert_batch( + store_id=store_id, + changelist=pre, + status=Status.COMMITTED, ) - assert actual <= maximum, f"(coefficient {index}) {actual} > {maximum}: {paired}" - -@dataclass -class BatchesInsertBenchmarkCase: - count: int - batch_count: int - limit: float - marks: Marks = () - - @property - def id(self) -> str: - return f"count={self.count},batch_count={self.batch_count}" + with benchmark_runner.assert_runtime(seconds=case.limit): + await data_store.insert_batch( + store_id=store_id, + changelist=batch, + ) @datacases( diff --git a/chia/_tests/process_junit.py b/chia/_tests/process_junit.py index 48b8460ca230..fb1388ba62a6 100644 --- a/chia/_tests/process_junit.py +++ b/chia/_tests/process_junit.py @@ -189,32 +189,6 @@ def main( ) -def format_number(n: float) -> str: - complete = f"{n:.999f}" - integral_digits, decimal_separator, decimal_digits = complete.partition(".") - for index, digit in enumerate(decimal_digits): - if digit != "0": - places = index + 1 - break - else: - places = 0 - - group_size = 3 - - places = ((places + group_size) // group_size) * group_size - decimal_digits = decimal_digits[:places] - - result = "" - result += ",".join( - [integral_digits[start : start + group_size] for start in range(0, len(integral_digits), group_size)] - ) - result += "." 
- result += " ".join( - [decimal_digits[start : start + group_size] for start in range(0, len(decimal_digits), group_size)] - ) - return result - - def output_benchmark( link_line_separator: str, link_prefix: str, @@ -241,17 +215,17 @@ def output_benchmark( three_sigma_str = "-" if len(result.durations) > 1: durations_mean = mean(result.durations) - mean_str = f"{format_number(durations_mean)} s" + mean_str = f"{durations_mean:.3f} s" try: - three_sigma_str = f"{format_number(durations_mean + 3 * stdev(result.durations))} s" + three_sigma_str = f"{durations_mean + 3 * stdev(result.durations):.3f} s" except StatisticsError: pass durations_max = max(result.durations) - max_str = f"{format_number(durations_max)} s" + max_str = f"{durations_max:.3f} s" - limit_str = f"{format_number(result.limit)} s" + limit_str = f"{result.limit:.3f} s" percent = 100 * durations_max / result.limit if percent >= 100: @@ -318,17 +292,17 @@ def output_time_out_assert( three_sigma_str = "-" if len(result.durations) > 1: durations_mean = mean(result.durations) - mean_str = f"{format_number(durations_mean)} s" + mean_str = f"{durations_mean:.3f} s" try: - three_sigma_str = f"{format_number(durations_mean + 3 * stdev(result.durations))} s" + three_sigma_str = f"{durations_mean + 3 * stdev(result.durations):.3f} s" except StatisticsError: pass durations_max = max(result.durations) - max_str = f"{format_number(durations_max)} s" + max_str = f"{durations_max:.3f} s" - limit_str = f"{format_number(result.limit)} s" + limit_str = f"{result.limit:.3f} s" percent = 100 * durations_max / result.limit if percent >= 100: diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index 295fd16d4ae1..69131dec2942 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -390,34 +390,6 @@ def assert_runtime(self, *args: Any, **kwargs: Any) -> _AssertRuntime: kwargs.setdefault("overhead", self.overhead) return _AssertRuntime(*args, **kwargs) - def print_runtime(self, *args: Any, **kwargs: Any) -> _AssertRuntime: - kwargs.setdefault("enable_assertion", False) - # TODO: ick - kwargs.setdefault("seconds", 1) - kwargs.setdefault("overhead", self.overhead) - return _AssertRuntime(*args, **kwargs) - - def record_value(self, value: float, limit: float, label: str) -> None: - if ether.record_property is not None: - file, line = caller_file_and_line( - relative_to=( - pathlib.Path(chia.__file__).parent.parent, - pathlib.Path(chia._tests.__file__).parent.parent, - ) - ) - data = BenchmarkData( - duration=value, - path=pathlib.Path(file), - line=line, - limit=limit, - label=label, - ) - - ether.record_property( # pylint: disable=E1102 - data.tag, - json.dumps(data.marshal(), ensure_ascii=True, sort_keys=True), - ) - @contextlib.contextmanager def assert_rpc_error(error: str) -> Iterator[None]: diff --git a/setup.py b/setup.py index 10a19a37d0ca..ec095861110f 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,6 @@ ] dev_dependencies = [ - "big-o==0.11.0", "build==1.2.1", "coverage==7.5.3", "diff-cover==9.0.0", From 7c54e60910c6db80c2a7d92ed4f2d500e8e7179c Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 21 Jun 2024 16:33:31 -0400 Subject: [PATCH 12/77] correct release case on mac (#18227) * correct gh release upload casing of macos torrent file (cherry picked from commit 310dbdd6486790d283c4f6fb92bff76483e1093a) * correct s3 upload case --- .github/workflows/build-macos-installers.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-macos-installers.yml 
b/.github/workflows/build-macos-installers.yml index b5f4bc8d2e42..1e70511e45cd 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -344,7 +344,7 @@ jobs: run: | py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg -o ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent --webseed https://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg ls ${{ github.workspace }}/build_scripts/final_installer/ - gh release upload --repo ${{ github.repository }} $RELEASE_TAG ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent + gh release upload --repo ${{ github.repository }} $RELEASE_TAG ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent - name: Upload Dev Installer if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main' @@ -355,9 +355,9 @@ jobs: - name: Upload Release Files if: steps.check_secrets.outputs.HAS_AWS_SECRET && env.FULL_RELEASE == 'true' run: | - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/install/ - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/install/ - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent s3://download.chia.net/torrents/ + aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg + aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 + aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent s3://download.chia.net/torrents/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent - name: Upload release artifacts if: env.RELEASE == 'true' From 4b2f9f0c7e7e575dd266c238a017b9856b254a1f Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Fri, 21 Jun 2024 16:02:55 -0700 Subject: [PATCH 13/77] Revert "use rust types for RecentChainData, ProofBlockHeader and WeightProof" #17738 (#18231) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Revert "use rust types for RecentChainData, ProofBlockHeader and WeightProof …" This reverts commit 3f0d5c070bfcc76abb6deb2dd118ebfce42ac324. 
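With this revert, `RecentChainData`, `ProofBlockHeader` and `WeightProof` go back to being Python `@streamable` frozen dataclasses rather than re-exports of chia_rs types; the restored definitions are in the diff below. A minimal sketch of the streamable-dataclass pattern, assuming an environment where `chia.util.streamable` is importable; `ExampleHeader` and its fields are invented for illustration:

    from dataclasses import dataclass
    from typing import List

    from chia.util.ints import uint32
    from chia.util.streamable import Streamable, streamable


    @streamable
    @dataclass(frozen=True)
    class ExampleHeader(Streamable):
        height: uint32
        prev_heights: List[uint32]


    header = ExampleHeader(uint32(5), [uint32(1), uint32(2)])
    # streamable classes get a canonical byte serialization and parsing for free
    assert ExampleHeader.from_bytes(bytes(header)) == header
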
--- chia/types/weight_proof.py | 47 ++++++++++++++++++++++++++++++++++---- 1 file changed, 43 insertions(+), 4 deletions(-) diff --git a/chia/types/weight_proof.py b/chia/types/weight_proof.py index 2670b17f5e9d..6c2b17e7aee0 100644 --- a/chia/types/weight_proof.py +++ b/chia/types/weight_proof.py @@ -1,11 +1,50 @@ from __future__ import annotations +from dataclasses import dataclass +from typing import List + import chia_rs -ProofBlockHeader = chia_rs.ProofBlockHeader -RecentChainData = chia_rs.RecentChainData -SubEpochChallengeSegment = chia_rs.SubEpochChallengeSegment +from chia.types.blockchain_format.reward_chain_block import RewardChainBlock +from chia.types.end_of_slot_bundle import EndOfSubSlotBundle +from chia.types.header_block import HeaderBlock +from chia.util.streamable import Streamable, streamable + SubEpochData = chia_rs.SubEpochData + +# number of challenge blocks +# Average iters for challenge blocks +# |--A-R----R-------R--------R------R----R----------R-----R--R---| Honest difficulty 1000 +# 0.16 + +# compute total reward chain blocks +# |----------------------------A---------------------------------| Attackers chain 1000 +# 0.48 +# total number of challenge blocks == total number of reward chain blocks + + +SubEpochChallengeSegment = chia_rs.SubEpochChallengeSegment SubEpochSegments = chia_rs.SubEpochSegments SubSlotData = chia_rs.SubSlotData -WeightProof = chia_rs.WeightProof + + +@streamable +@dataclass(frozen=True) +# this is used only for serialization to database +class RecentChainData(Streamable): + recent_chain_data: List[HeaderBlock] + + +@streamable +@dataclass(frozen=True) +class ProofBlockHeader(Streamable): + finished_sub_slots: List[EndOfSubSlotBundle] + reward_chain_block: RewardChainBlock + + +@streamable +@dataclass(frozen=True) +class WeightProof(Streamable): + sub_epochs: List[SubEpochData] + sub_epoch_segments: List[SubEpochChallengeSegment] # sampled sub epoch + recent_chain_data: List[HeaderBlock] From 4671193cbc8d89932049dc6851bfc67a99ee0cae Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Mon, 24 Jun 2024 11:05:23 -0700 Subject: [PATCH 14/77] CHIA-802: Update to macos-12 for build and remove macos-11 (#18238) * Update to macos-12 for build and remove macos-11 * Update MACOSX_DEPLOYMENT_TARGET --- .github/workflows/build-macos-installers.yml | 9 ++------- .github/workflows/check_wheel_availability.yaml | 2 +- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index dafe88778ae8..4bc696c5a05c 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -52,7 +52,7 @@ jobs: matrix: python-version: ["3.10"] os: - - runs-on: macos-11 + - runs-on: macos-12 name: intel bladebit-suffix: macos-x86-64.tar.gz - runs-on: [MacOS, ARM64] @@ -82,7 +82,7 @@ jobs: uses: Chia-Network/actions/setjobenv@main env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - MACOSX_DEPLOYMENT_TARGET: 11 + MACOSX_DEPLOYMENT_TARGET: 12 - name: Test for secrets access id: check_secrets @@ -378,11 +378,6 @@ jobs: fail-fast: false matrix: os: - - name: 11 - matrix: 11 - runs-on: - intel: macos-11 - arm: [macos, arm64] - name: 12 matrix: 12 runs-on: diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 214e97d9f12e..04f85d37430f 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml 
@@ -33,7 +33,7 @@ jobs: - name: macOS matrix: macos runs-on: - intel: macos-11 + intel: macos-12 arm: [macos, arm64] - name: Windows matrix: windows From 2e82142396dfe92ac5b984b625abd6c83060490f Mon Sep 17 00:00:00 2001 From: Izumi Hoshino Date: Tue, 25 Jun 2024 03:10:57 +0900 Subject: [PATCH 15/77] Fixed an issue where `npx --no lerna clean -f` didn't work (#18232) Fixed an issue where npx --no lerna clean -f didnot work --- build_scripts/build_linux_deb-1-gui.sh | 2 +- build_scripts/build_linux_rpm-1-gui.sh | 2 +- build_scripts/build_macos-1-gui.sh | 2 +- build_scripts/build_windows-1-gui.ps1 | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/build_scripts/build_linux_deb-1-gui.sh b/build_scripts/build_linux_deb-1-gui.sh index 67b150760211..e27e17901004 100644 --- a/build_scripts/build_linux_deb-1-gui.sh +++ b/build_scripts/build_linux_deb-1-gui.sh @@ -8,7 +8,7 @@ git submodule update --init chia-blockchain-gui cd ./chia-blockchain-gui || exit 1 echo "npm build" -npx --no lerna clean -y # With --no option, `npx` guarantees not to install package from remote registry +npx lerna clean -y # Removes packages/*/node_modules npm ci # Audit fix does not currently work with Lerna. See https://github.com/lerna/lerna/issues/1663 # npm audit fix diff --git a/build_scripts/build_linux_rpm-1-gui.sh b/build_scripts/build_linux_rpm-1-gui.sh index 85e7436f1ec5..e5fe97f08a48 100644 --- a/build_scripts/build_linux_rpm-1-gui.sh +++ b/build_scripts/build_linux_rpm-1-gui.sh @@ -7,7 +7,7 @@ git submodule update --init chia-blockchain-gui cd ./chia-blockchain-gui || exit 1 echo "npm build" -npx --no lerna clean -y # With --no option, `npx` guarantees not to install package from remote registry +npx lerna clean -y # Removes packages/*/node_modules npm ci # Audit fix does not currently work with Lerna. See https://github.com/lerna/lerna/issues/1663 # npm audit fix diff --git a/build_scripts/build_macos-1-gui.sh b/build_scripts/build_macos-1-gui.sh index e51478be2024..456fb94e7bba 100644 --- a/build_scripts/build_macos-1-gui.sh +++ b/build_scripts/build_macos-1-gui.sh @@ -9,7 +9,7 @@ git submodule update --init chia-blockchain-gui cd ./chia-blockchain-gui || exit 1 echo "npm build" -npx --no lerna clean -y # With --no option, `npx` guarantees not to install package from remote registry +npx lerna clean -y # Removes packages/*/node_modules npm ci # Audit fix does not currently work with Lerna. See https://github.com/lerna/lerna/issues/1663 # npm audit fix diff --git a/build_scripts/build_windows-1-gui.ps1 b/build_scripts/build_windows-1-gui.ps1 index 4c3d8e839806..fe368bd402d5 100644 --- a/build_scripts/build_windows-1-gui.ps1 +++ b/build_scripts/build_windows-1-gui.ps1 @@ -14,8 +14,8 @@ Write-Output "Build GUI npm modules" Write-Output " ---" $Env:NODE_OPTIONS = "--max-old-space-size=3000" -Write-Output "lerna clean -y" -npx --no lerna clean -y # With --no option, `npx` guarantees not to install package from remote registry +Write-Output "npx lerna clean -y" +npx lerna clean -y # Removes packages/*/node_modules Write-Output "npm ci" npm ci # Audit fix does not currently work with Lerna. 
See https://github.com/lerna/lerna/issues/1663 From 6ed262e3fbe2f574c555b806c280a3e7a12e9c55 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Mon, 24 Jun 2024 13:53:59 -0700 Subject: [PATCH 16/77] [CHIA-690] Add offer expiration to CLI (#18193) * Add offer expiration to CLI * Add timezone information --- chia/_tests/cmds/wallet/test_wallet.py | 28 +++++++++++++++++++++++++- chia/cmds/wallet_funcs.py | 16 ++++++++++++++- 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 751c55f1965e..2fbae9253c10 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -1,5 +1,6 @@ from __future__ import annotations +import datetime import os from pathlib import Path from typing import Any, Dict, List, Optional, Tuple, Union, cast @@ -855,7 +856,12 @@ async def get_all_offers( ], trade_id=bytes32([1 + i] * 32), status=uint32(TradeStatus.PENDING_ACCEPT.value), - valid_times=ConditionValidTimes(), + valid_times=ConditionValidTimes( + min_time=uint64(0), + max_time=uint64(100), + min_height=uint32(0), + max_height=uint32(100), + ), ) records.append(trade_offer) return records @@ -888,6 +894,26 @@ async def get_all_offers( ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = {"get_all_offers": [(0, 10, None, True, False, True, True, True)]} + command_args = [ + "wallet", + "get_offers", + FINGERPRINT_ARG, + "--summaries", + ] + tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo + # these are various things that should be in the output + assert_list = [ + "Timelock information:", + " - Not valid until ", + " - Expires at ", + f"{datetime.datetime.fromtimestamp(0, tz=tzinfo).strftime('%Y-%m-%d %H:%M %Z')}", + f"{datetime.datetime.fromtimestamp(100, tz=tzinfo).strftime('%Y-%m-%d %H:%M %Z')}", + "height 0", + "height 100", + ] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + assert expected_calls["get_all_offers"] is not None + expected_calls["get_all_offers"].append((0, 10, None, False, True, False, False, False)) test_rpc_clients.wallet_rpc_client.check_log(expected_calls) diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index f286c7b2a492..ab762b9ae60e 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -6,7 +6,7 @@ import pathlib import sys import time -from datetime import datetime +from datetime import datetime, timezone from decimal import Decimal from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -607,6 +607,11 @@ async def print_offer_summary( print(output) +def format_timestamp_with_timezone(timestamp: int) -> str: + tzinfo = datetime.now(timezone.utc).astimezone().tzinfo + return datetime.fromtimestamp(timestamp, tz=tzinfo).strftime("%Y-%m-%d %H:%M %Z") + + async def print_trade_record(record: TradeRecord, wallet_client: WalletRpcClient, summaries: bool = False) -> None: print() print(f"Record with id: {record.trade_id}") @@ -629,6 +634,15 @@ async def print_trade_record(record: TradeRecord, wallet_client: WalletRpcClient print("Pending Outbound Balances:") await print_offer_summary(cat_name_resolver, outbound_balances, has_fee=(fees > 0)) print(f"Included Fees: {fees / units['chia']} XCH, {fees} mojos") + print("Timelock information:") + if record.valid_times.min_time is not None: + print(" - Not valid until " f"{format_timestamp_with_timezone(record.valid_times.min_time)}") + if 
record.valid_times.min_height is not None: + print(f" - Not valid until height {record.valid_times.min_height}") + if record.valid_times.max_time is not None: + print(" - Expires at " f"{format_timestamp_with_timezone(record.valid_times.max_time)} " "(+/- 10 min)") + if record.valid_times.max_height is not None: + print(f" - Expires at height {record.valid_times.max_height} (wait ~10 blocks after to be reorg safe)") print("---------------") From 20fb8b223f7f140d3a2ea5651933d42a5c4078db Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Mon, 24 Jun 2024 23:54:13 +0300 Subject: [PATCH 17/77] DL query multiple ancestors in upsert. (#18146) --- chia/data_layer/data_store.py | 50 +++++++++++++++++++++++++++-------- 1 file changed, 39 insertions(+), 11 deletions(-) diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index 59c41ec11c52..741a6495a854 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -39,7 +39,7 @@ ) from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.util.db_wrapper import DBWrapper2 +from chia.util.db_wrapper import SQLITE_MAX_VARIABLE_NUMBER, DBWrapper2 log = logging.getLogger(__name__) @@ -1541,16 +1541,18 @@ async def insert_batch( raise Exception(f"Operation in batch is not insert or delete: {change}") if len(pending_upsert_new_hashes) > 0: - to_update_hashes: Set[bytes32] = set() - for hash in pending_upsert_new_hashes.keys(): - while True: - if hash in to_update_hashes: - break - to_update_hashes.add(hash) - node = await self._get_one_ancestor(hash, store_id) - if node is None: - break - hash = node.hash + to_update_hashes: Set[bytes32] = set(pending_upsert_new_hashes.keys()) + to_update_queue: List[bytes32] = list(pending_upsert_new_hashes.keys()) + batch_size = min(500, SQLITE_MAX_VARIABLE_NUMBER - 10) + + while len(to_update_queue) > 0: + nodes = await self._get_one_ancestor_multiple_hashes(to_update_queue[:batch_size], store_id) + to_update_queue = to_update_queue[batch_size:] + for node in nodes: + if node.hash not in to_update_hashes: + to_update_hashes.add(node.hash) + to_update_queue.append(node.hash) + assert latest_local_root is not None assert latest_local_root.node_hash is not None new_root_hash = await self.batch_upsert( @@ -1651,6 +1653,32 @@ async def _get_one_ancestor( return None return InternalNode.from_row(row=row) + async def _get_one_ancestor_multiple_hashes( + self, + node_hashes: List[bytes32], + store_id: bytes32, + generation: Optional[int] = None, + ) -> List[InternalNode]: + async with self.db_wrapper.reader() as reader: + node_hashes_place_holders = ",".join("?" for _ in node_hashes) + if generation is None: + generation = await self.get_tree_generation(store_id=store_id) + cursor = await reader.execute( + f""" + SELECT * from node INNER JOIN ( + SELECT ancestors.ancestor AS hash, MAX(ancestors.generation) AS generation + FROM ancestors + WHERE ancestors.hash IN ({node_hashes_place_holders}) + AND ancestors.tree_id == ? + AND ancestors.generation <= ? 
+ GROUP BY hash + ) asc on asc.hash == node.hash + """, + [*node_hashes, store_id, generation], + ) + rows = await cursor.fetchall() + return [InternalNode.from_row(row=row) for row in rows] + async def build_ancestor_table_for_latest_root(self, store_id: bytes32) -> None: async with self.db_wrapper.writer() as writer: root = await self.get_tree_root(store_id=store_id) From 9abedb578b8966a06679f821f55126ae8fcc8e13 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 24 Jun 2024 21:54:42 +0100 Subject: [PATCH 18/77] CHIA-765 Add the ability to set a fee in make_item (#18174) Add the ability to set a fee in make_item. --- chia/_tests/core/mempool/test_mempool.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 7f1b78263259..2d513f7ea6bb 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -98,11 +98,13 @@ def generate_test_spend_bundle( return transaction -def make_item(idx: int, cost: uint64 = uint64(80), assert_height: uint32 = uint32(100)) -> MempoolItem: +def make_item( + idx: int, cost: uint64 = uint64(80), assert_height: uint32 = uint32(100), fee: uint64 = uint64(0) +) -> MempoolItem: spend_bundle_name = bytes32([idx] * 32) return MempoolItem( SpendBundle([], G2Element()), - uint64(0), + fee, NPCResult(None, SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0)), spend_bundle_name, uint32(0), From 7a8489ecc95d08364a83791141b2d8bf48e5acfa Mon Sep 17 00:00:00 2001 From: wjblanke Date: Mon, 24 Jun 2024 13:56:04 -0700 Subject: [PATCH 19/77] Fix for freebsd suggested by user alghmma (#17994) * fix for freebsd suggested by user alghmma * fix copy paste --- chia/util/timing.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/chia/util/timing.py b/chia/util/timing.py index e2551d007d01..3c666fa47299 100644 --- a/chia/util/timing.py +++ b/chia/util/timing.py @@ -25,7 +25,10 @@ # https://docs.github.com/en/actions/learn-github-actions/environment-variables#default-environment-variables _system_delay = system_delays["github"][sys.platform] else: - _system_delay = system_delays["local"][sys.platform] + try: + _system_delay = system_delays["local"][sys.platform] + except KeyError: + _system_delay = system_delays["local"]["linux"] @overload From a958f8053785a232e33ef4a00ad979c804a3835f Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 24 Jun 2024 17:06:12 -0400 Subject: [PATCH 20/77] remove no-op semver action from rpm build (#18224) --- .github/workflows/build-linux-installer-rpm.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 28a82b1bb6d3..aef0ad1be609 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -79,9 +79,6 @@ jobs: env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - uses: Chia-Network/actions/enforce-semver@main - if: env.FULL_RELEASE == 'true' - - name: Get latest madmax plotter env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} From bce4b4a494eb6c81d452eae8a99317c83904a4f6 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 24 Jun 2024 22:12:20 +0100 Subject: [PATCH 21/77] CHIA-766 Deduplicate sb mempool asserts (#18175) Deduplicate sb mempool asserts. 
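The shared assert_sb_in_pool / assert_sb_not_in_pool helpers imported from chia/_tests/core/mempool/test_mempool_manager.py are not part of this diff; judging from the call sites below, which pass the mempool manager directly instead of a simulator node, they are presumably shaped roughly like the following sketch (import paths and type annotations are assumed, not taken from this patch):

    from chia.full_node.mempool_manager import MempoolManager
    from chia.types.spend_bundle import SpendBundle

    def assert_sb_in_pool(mempool_manager: MempoolManager, sb: SpendBundle) -> None:
        # The bundle should be retrievable from the mempool by its name (hash).
        assert sb == mempool_manager.get_spendbundle(sb.name())

    def assert_sb_not_in_pool(mempool_manager: MempoolManager, sb: SpendBundle) -> None:
        # Looking the bundle up by name should yield nothing.
        assert mempool_manager.get_spendbundle(sb.name()) is None
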
--- chia/_tests/core/mempool/test_mempool.py | 34 +++++++++++------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 2d513f7ea6bb..867d0c0c9ea8 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -15,6 +15,8 @@ from chia._tests.core.mempool.test_mempool_manager import ( IDENTITY_PUZZLE_HASH, TEST_COIN, + assert_sb_in_pool, + assert_sb_not_in_pool, make_test_coins, mempool_item_from_spendbundle, mk_item, @@ -588,12 +590,6 @@ async def test_double_spend( assert sb2 is None assert status == MempoolInclusionStatus.PENDING - def assert_sb_in_pool(self, node: FullNodeSimulator, sb: SpendBundle) -> None: - assert sb == node.full_node.mempool_manager.get_spendbundle(sb.name()) - - def assert_sb_not_in_pool(self, node: FullNodeSimulator, sb: SpendBundle) -> None: - assert node.full_node.mempool_manager.get_spendbundle(sb.name()) is None - @pytest.mark.anyio async def test_double_spend_with_higher_fee( self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool @@ -624,15 +620,15 @@ async def test_double_spend_with_higher_fee( sb1_2 = await gen_and_send_sb(full_node_1, wallet_a, coin1, fee=uint64(1)) # Fee increase is insufficient, the old spendbundle must stay - self.assert_sb_in_pool(full_node_1, sb1_1) - self.assert_sb_not_in_pool(full_node_1, sb1_2) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb1_1) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1_2) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb1_3 = await gen_and_send_sb(full_node_1, wallet_a, coin1, fee=MEMPOOL_MIN_FEE_INCREASE) # Fee increase is sufficiently high, sb1_1 gets replaced with sb1_3 - self.assert_sb_not_in_pool(full_node_1, sb1_1) - self.assert_sb_in_pool(full_node_1, sb1_3) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1_1) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb1_3) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb2 = generate_test_spend_bundle(wallet_a, coin2, fee=MEMPOOL_MIN_FEE_INCREASE) @@ -641,8 +637,8 @@ async def test_double_spend_with_higher_fee( # Aggregated spendbundle sb12 replaces sb1_3 since it spends a superset # of coins spent in sb1_3 - self.assert_sb_in_pool(full_node_1, sb12) - self.assert_sb_not_in_pool(full_node_1, sb1_3) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb12) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1_3) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb3 = generate_test_spend_bundle(wallet_a, coin3, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2)) @@ -651,20 +647,20 @@ async def test_double_spend_with_higher_fee( # sb23 must not replace existing sb12 as the former does not spend all # coins that are spent in the latter (specifically, coin1) - self.assert_sb_in_pool(full_node_1, sb12) - self.assert_sb_not_in_pool(full_node_1, sb23) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb12) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb23) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) await send_sb(full_node_1, sb3) # Adding non-conflicting sb3 should succeed - self.assert_sb_in_pool(full_node_1, sb3) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb3) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb4_1 = 
generate_test_spend_bundle(wallet_a, coin4, fee=MEMPOOL_MIN_FEE_INCREASE) sb1234_1 = SpendBundle.aggregate([sb12, sb3, sb4_1]) await send_sb(full_node_1, sb1234_1) # sb1234_1 should not be in pool as it decreases total fees per cost - self.assert_sb_not_in_pool(full_node_1, sb1234_1) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1234_1) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) sb4_2 = generate_test_spend_bundle(wallet_a, coin4, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2)) @@ -672,9 +668,9 @@ async def test_double_spend_with_higher_fee( await send_sb(full_node_1, sb1234_2) # sb1234_2 has a higher fee per cost than its conflicts and should get # into mempool - self.assert_sb_in_pool(full_node_1, sb1234_2) - self.assert_sb_not_in_pool(full_node_1, sb12) - self.assert_sb_not_in_pool(full_node_1, sb3) + assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb1234_2) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb12) + assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb3) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) @pytest.mark.anyio From 058b80775a7fa4e0235dd7dbed13114f87b51eb8 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Mon, 24 Jun 2024 14:34:02 -0700 Subject: [PATCH 22/77] [CHIA-711] Add `WalletActionScope` (#18125) * Add the concept of 'action scopes' * Add `WalletActionScope` * Add the concept of 'action scopes' * pylint and test coverage * add try/finally * add try/except * Undo giving a variable a name * Test coverage * Ban partial sigining in another scenario * Make WalletActionScope an alias instead * Add extra_spends to the action scope flow * Add test for .add_pending_transactions --- .../_tests/wallet/test_wallet_action_scope.py | 80 +++++++++++++ .../wallet/test_wallet_state_manager.py | 111 +++++++++++++++++- chia/wallet/wallet_action_scope.py | 95 +++++++++++++++ chia/wallet/wallet_state_manager.py | 69 ++++++++--- 4 files changed, 338 insertions(+), 17 deletions(-) create mode 100644 chia/_tests/wallet/test_wallet_action_scope.py create mode 100644 chia/wallet/wallet_action_scope.py diff --git a/chia/_tests/wallet/test_wallet_action_scope.py b/chia/_tests/wallet/test_wallet_action_scope.py new file mode 100644 index 000000000000..54583e96bc9c --- /dev/null +++ b/chia/_tests/wallet/test_wallet_action_scope.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import List, Optional, Tuple + +import pytest +from chia_rs import G2Element + +from chia._tests.cmds.wallet.test_consts import STD_TX +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.spend_bundle import SpendBundle +from chia.wallet.signer_protocol import SigningResponse +from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.wallet_action_scope import WalletSideEffects +from chia.wallet.wallet_state_manager import WalletStateManager + +MOCK_SR = SigningResponse(b"hey", bytes32([0] * 32)) +MOCK_SB = SpendBundle([], G2Element()) + + +def test_back_and_forth_serialization() -> None: + assert bytes(WalletSideEffects()) == b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + assert WalletSideEffects.from_bytes(bytes(WalletSideEffects())) == WalletSideEffects() + assert WalletSideEffects.from_bytes(bytes(WalletSideEffects([STD_TX], [MOCK_SR], [MOCK_SB]))) == WalletSideEffects( + [STD_TX], [MOCK_SR], [MOCK_SB] + ) + assert WalletSideEffects.from_bytes( + bytes(WalletSideEffects([STD_TX, STD_TX], [MOCK_SR, MOCK_SR], 
[MOCK_SB, MOCK_SB])) + ) == WalletSideEffects([STD_TX, STD_TX], [MOCK_SR, MOCK_SR], [MOCK_SB, MOCK_SB]) + + +@dataclass +class MockWalletStateManager: + most_recent_call: Optional[ + Tuple[List[TransactionRecord], bool, bool, bool, List[SigningResponse], List[SpendBundle]] + ] = None + + async def add_pending_transactions( + self, + txs: List[TransactionRecord], + push: bool, + merge_spends: bool, + sign: bool, + additional_signing_responses: List[SigningResponse], + extra_spends: List[SpendBundle], + ) -> List[TransactionRecord]: + self.most_recent_call = (txs, push, merge_spends, sign, additional_signing_responses, extra_spends) + return txs + + +MockWalletStateManager.new_action_scope = WalletStateManager.new_action_scope # type: ignore[attr-defined] + + +@pytest.mark.anyio +async def test_wallet_action_scope() -> None: + wsm = MockWalletStateManager() + async with wsm.new_action_scope( # type: ignore[attr-defined] + push=True, + merge_spends=False, + sign=True, + additional_signing_responses=[], + extra_spends=[], + ) as action_scope: + async with action_scope.use() as interface: + interface.side_effects.transactions = [STD_TX] + + with pytest.raises(RuntimeError): + action_scope.side_effects + + assert action_scope.side_effects.transactions == [STD_TX] + assert wsm.most_recent_call == ([STD_TX], True, False, True, [], []) + + async with wsm.new_action_scope( # type: ignore[attr-defined] + push=False, merge_spends=True, sign=True, additional_signing_responses=[] + ) as action_scope: + async with action_scope.use() as interface: + interface.side_effects.transactions = [] + + assert action_scope.side_effects.transactions == [] + assert wsm.most_recent_call == ([], False, True, True, [], []) diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 06ee7bf518e5..826db346e8d3 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -1,19 +1,26 @@ from __future__ import annotations from contextlib import asynccontextmanager -from typing import AsyncIterator +from typing import AsyncIterator, List import pytest +from chia_rs import G2Element +from chia._tests.environments.wallet import WalletTestFramework from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia.protocols.wallet_protocol import CoinState from chia.server.outbound_message import NodeType from chia.types.blockchain_format.coin import Coin +from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.coin_spend import make_spend from chia.types.peer_info import PeerInfo +from chia.types.spend_bundle import SpendBundle from chia.util.ints import uint32, uint64 from chia.wallet.derivation_record import DerivationRecord from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened +from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.transaction_type import TransactionType from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_state_manager import WalletStateManager @@ -95,3 +102,105 @@ async def test_determine_coin_type(simulator_and_wallet: OldSimulatorsAndWallets assert (None, None) == await wallet_state_manager.determine_coin_type( peer, CoinState(Coin(bytes32(b"1" * 32), bytes32(b"1" * 32), uint64(0)), uint32(0), uint32(0)), None ) + + +@pytest.mark.parametrize( + "wallet_environments", + [{"num_environments": 1, "blocks_needed": [1], 
"trusted": True, "reuse_puzhash": True}], + indirect=True, +) +@pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.anyio +async def test_commit_transactions_to_db(wallet_environments: WalletTestFramework) -> None: + env = wallet_environments.environments[0] + wsm = env.wallet_state_manager + + coins = list( + await wsm.main_wallet.select_coins( + uint64(2_000_000_000_000), coin_selection_config=wallet_environments.tx_config.coin_selection_config + ) + ) + [tx1] = await wsm.main_wallet.generate_signed_transaction( + uint64(0), + bytes32([0] * 32), + wallet_environments.tx_config, + coins={coins[0]}, + ) + [tx2] = await wsm.main_wallet.generate_signed_transaction( + uint64(0), + bytes32([0] * 32), + wallet_environments.tx_config, + coins={coins[1]}, + ) + + def flatten_spend_bundles(txs: List[TransactionRecord]) -> List[SpendBundle]: + return [tx.spend_bundle for tx in txs if tx.spend_bundle is not None] + + assert ( + len(await wsm.tx_store.get_all_transactions_for_wallet(wsm.main_wallet.id(), type=TransactionType.OUTGOING_TX)) + == 0 + ) + new_txs = await wsm.add_pending_transactions( + [tx1, tx2], + push=False, + merge_spends=False, + sign=False, + extra_spends=[], + ) + bundles = flatten_spend_bundles(new_txs) + assert len(bundles) == 2 + for bundle in bundles: + assert bundle.aggregated_signature == G2Element() + assert ( + len(await wsm.tx_store.get_all_transactions_for_wallet(wsm.main_wallet.id(), type=TransactionType.OUTGOING_TX)) + == 0 + ) + + extra_coin_spend = make_spend( + Coin(bytes32(b"1" * 32), bytes32(b"1" * 32), uint64(0)), Program.to(1), Program.to([None]) + ) + extra_spend = SpendBundle([extra_coin_spend], G2Element()) + + new_txs = await wsm.add_pending_transactions( + [tx1, tx2], + push=False, + merge_spends=False, + sign=False, + extra_spends=[extra_spend], + ) + bundles = flatten_spend_bundles(new_txs) + assert len(bundles) == 2 + for bundle in bundles: + assert bundle.aggregated_signature == G2Element() + assert ( + len(await wsm.tx_store.get_all_transactions_for_wallet(wsm.main_wallet.id(), type=TransactionType.OUTGOING_TX)) + == 0 + ) + assert extra_coin_spend in [spend for bundle in bundles for spend in bundle.coin_spends] + + new_txs = await wsm.add_pending_transactions( + [tx1, tx2], + push=False, + merge_spends=True, + sign=False, + extra_spends=[extra_spend], + ) + bundles = flatten_spend_bundles(new_txs) + assert len(bundles) == 1 + for bundle in bundles: + assert bundle.aggregated_signature == G2Element() + assert ( + len(await wsm.tx_store.get_all_transactions_for_wallet(wsm.main_wallet.id(), type=TransactionType.OUTGOING_TX)) + == 0 + ) + assert extra_coin_spend in [spend for bundle in bundles for spend in bundle.coin_spends] + + [tx1, tx2] = await wsm.add_pending_transactions([tx1, tx2], push=True, merge_spends=True, sign=True) + bundles = flatten_spend_bundles(new_txs) + assert len(bundles) == 1 + assert ( + len(await wsm.tx_store.get_all_transactions_for_wallet(wsm.main_wallet.id(), type=TransactionType.OUTGOING_TX)) + == 2 + ) + + await wallet_environments.full_node.wait_transaction_records_entered_mempool([tx1, tx2]) diff --git a/chia/wallet/wallet_action_scope.py b/chia/wallet/wallet_action_scope.py new file mode 100644 index 000000000000..85f4cb759b8f --- /dev/null +++ b/chia/wallet/wallet_action_scope.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import contextlib +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, AsyncIterator, List, Optional, cast + +from chia.types.spend_bundle import 
SpendBundle +from chia.util.action_scope import ActionScope +from chia.wallet.signer_protocol import SigningResponse +from chia.wallet.transaction_record import TransactionRecord + +if TYPE_CHECKING: + # Avoid a circular import here + from chia.wallet.wallet_state_manager import WalletStateManager + + +@dataclass +class WalletSideEffects: + transactions: List[TransactionRecord] = field(default_factory=list) + signing_responses: List[SigningResponse] = field(default_factory=list) + extra_spends: List[SpendBundle] = field(default_factory=list) + + def __bytes__(self) -> bytes: + blob = b"" + blob += len(self.transactions).to_bytes(4, "big") + for tx in self.transactions: + tx_bytes = bytes(tx) + blob += len(tx_bytes).to_bytes(4, "big") + tx_bytes + blob += len(self.signing_responses).to_bytes(4, "big") + for sr in self.signing_responses: + sr_bytes = bytes(sr) + blob += len(sr_bytes).to_bytes(4, "big") + sr_bytes + blob += len(self.extra_spends).to_bytes(4, "big") + for sb in self.extra_spends: + sb_bytes = bytes(sb) + blob += len(sb_bytes).to_bytes(4, "big") + sb_bytes + return blob + + @classmethod + def from_bytes(cls, blob: bytes) -> WalletSideEffects: + instance = cls() + while blob != b"": + tx_len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + for _ in range(0, tx_len_prefix): + len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + instance.transactions.append(TransactionRecord.from_bytes(blob[:len_prefix])) + blob = blob[len_prefix:] + sr_len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + for _ in range(0, sr_len_prefix): + len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + instance.signing_responses.append(SigningResponse.from_bytes(blob[:len_prefix])) + blob = blob[len_prefix:] + sb_len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + for _ in range(0, sb_len_prefix): + len_prefix = int.from_bytes(blob[:4], "big") + blob = blob[4:] + instance.extra_spends.append(SpendBundle.from_bytes(blob[:len_prefix])) + blob = blob[len_prefix:] + + return instance + + +WalletActionScope = ActionScope[WalletSideEffects] + + +@contextlib.asynccontextmanager +async def new_wallet_action_scope( + wallet_state_manager: WalletStateManager, + push: bool = False, + merge_spends: bool = True, + sign: Optional[bool] = None, + additional_signing_responses: List[SigningResponse] = [], + extra_spends: List[SpendBundle] = [], +) -> AsyncIterator[WalletActionScope]: + async with ActionScope.new_scope(WalletSideEffects) as self: + self = cast(WalletActionScope, self) + async with self.use() as interface: + interface.side_effects.signing_responses = additional_signing_responses.copy() + interface.side_effects.extra_spends = extra_spends.copy() + + yield self + + self.side_effects.transactions = await wallet_state_manager.add_pending_transactions( + self.side_effects.transactions, + push=push, + merge_spends=merge_spends, + sign=sign, + additional_signing_responses=self.side_effects.signing_responses, + extra_spends=self.side_effects.extra_spends, + ) diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index f0a8073f7724..0b2c514073d1 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -1,6 +1,7 @@ from __future__ import annotations import asyncio +import contextlib import dataclasses import logging import multiprocessing.context @@ -143,6 +144,7 @@ from chia.wallet.vc_wallet.vc_store import VCStore from chia.wallet.vc_wallet.vc_wallet import VCWallet from 
chia.wallet.wallet import Wallet +from chia.wallet.wallet_action_scope import WalletActionScope, new_wallet_action_scope from chia.wallet.wallet_blockchain import WalletBlockchain from chia.wallet.wallet_coin_record import MetadataTypes, WalletCoinRecord from chia.wallet.wallet_coin_store import WalletCoinStore @@ -2254,9 +2256,11 @@ async def coin_added( async def add_pending_transactions( self, tx_records: List[TransactionRecord], + push: bool = True, merge_spends: bool = True, sign: Optional[bool] = None, - additional_signing_responses: List[SigningResponse] = [], + additional_signing_responses: Optional[List[SigningResponse]] = None, + extra_spends: Optional[List[SpendBundle]] = None, ) -> List[TransactionRecord]: """ Add a list of transactions to be submitted to the full node. @@ -2267,6 +2271,8 @@ async def add_pending_transactions( agg_spend: SpendBundle = SpendBundle.aggregate( [tx.spend_bundle for tx in tx_records if tx.spend_bundle is not None] ) + if extra_spends is not None: + agg_spend = SpendBundle.aggregate([agg_spend, *extra_spends]) actual_spend_involved: bool = agg_spend != SpendBundle([], G2Element()) if merge_spends and actual_spend_involved: tx_records = [ @@ -2277,27 +2283,39 @@ async def add_pending_transactions( ) for i, tx in enumerate(tx_records) ] + elif extra_spends is not None and extra_spends != []: + extra_spends.extend([] if tx_records[0].spend_bundle is None else [tx_records[0].spend_bundle]) + extra_spend_bundle = SpendBundle.aggregate(extra_spends) + tx_records = [ + dataclasses.replace( + tx, + spend_bundle=extra_spend_bundle if i == 0 else tx.spend_bundle, + name=extra_spend_bundle.name() if i == 0 else bytes32.secret(), + ) + for i, tx in enumerate(tx_records) + ] if sign: tx_records, _ = await self.sign_transactions( tx_records, - additional_signing_responses, - additional_signing_responses != [], + [] if additional_signing_responses is None else additional_signing_responses, + additional_signing_responses != [] and additional_signing_responses is not None, ) - all_coins_names = [] - async with self.db_wrapper.writer_maybe_transaction(): - for tx_record in tx_records: - # Wallet node will use this queue to retry sending this transaction until full nodes receives it - await self.tx_store.add_transaction_record(tx_record) - all_coins_names.extend([coin.name() for coin in tx_record.additions]) - all_coins_names.extend([coin.name() for coin in tx_record.removals]) + if push: + all_coins_names = [] + async with self.db_wrapper.writer_maybe_transaction(): + for tx_record in tx_records: + # Wallet node will use this queue to retry sending this transaction until full nodes receives it + await self.tx_store.add_transaction_record(tx_record) + all_coins_names.extend([coin.name() for coin in tx_record.additions]) + all_coins_names.extend([coin.name() for coin in tx_record.removals]) - await self.add_interested_coin_ids(all_coins_names) + await self.add_interested_coin_ids(all_coins_names) - if actual_spend_involved: - self.tx_pending_changed() - for wallet_id in {tx.wallet_id for tx in tx_records}: - self.state_changed("pending_transaction", wallet_id) - await self.wallet_node.update_ui() + if actual_spend_involved: + self.tx_pending_changed() + for wallet_id in {tx.wallet_id for tx in tx_records}: + self.state_changed("pending_transaction", wallet_id) + await self.wallet_node.update_ui() return tx_records @@ -2738,3 +2756,22 @@ async def submit_transactions(self, signed_txs: List[SignedTransaction]) -> List for bundle in bundles: await 
self.wallet_node.push_tx(bundle) return [bundle.name() for bundle in bundles] + + @contextlib.asynccontextmanager + async def new_action_scope( + self, + push: bool = False, + merge_spends: bool = True, + sign: Optional[bool] = None, + additional_signing_responses: List[SigningResponse] = [], + extra_spends: List[SpendBundle] = [], + ) -> AsyncIterator[WalletActionScope]: + async with new_wallet_action_scope( + self, + push=push, + merge_spends=merge_spends, + sign=sign, + additional_signing_responses=additional_signing_responses, + extra_spends=extra_spends, + ) as action_scope: + yield action_scope From a83f59bf2bf33e4521e537016031c9be0f485d4a Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Mon, 24 Jun 2024 15:26:46 -0700 Subject: [PATCH 23/77] Fix backwards compatibility for `add_private_key` (#18237) * Fix backwards compatibility for `add_private_key` * Another backwards compatibility fix --- chia/daemon/keychain_server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/daemon/keychain_server.py b/chia/daemon/keychain_server.py index b5b506c560f1..57edc5c387e4 100644 --- a/chia/daemon/keychain_server.py +++ b/chia/daemon/keychain_server.py @@ -174,9 +174,9 @@ def get_keychain_for_request(self, request: Dict[str, Any]) -> Keychain: async def handle_command(self, command: str, data: Dict[str, Any]) -> Dict[str, Any]: try: if command == "add_private_key": - return await self.add_key( - {"mnemonic_or_pk": data.get("mnemonic", None), "label": data.get("label", None), "private": True} - ) + data["private"] = True + data["mnemonic_or_pk"] = data.get("mnemonic_or_pk", data.get("mnemonic", None)) + return await self.add_key(data) elif command == "add_key": return await self.add_key(data) elif command == "check_keys": From 406550e4ccd1b3d683d58b537f1cab3a307a4b54 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Mon, 24 Jun 2024 15:29:38 -0700 Subject: [PATCH 24/77] Resolve light wallet syncing regression (#18236) * Update CHANGELOG for 2.4.1 * Update some other items int he changelog * Revert "Update some other items int he changelog" This reverts commit 047b816cfe4a1f36174a382d8e74d5a67d2f5612. * changelog updates --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7bc7204696ee..0ef6835e8995 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) for setuptools_scm/PEP 440 reasons. +## 2.4.1 Chia blockchain 2024-06-25 + +## What's Changed +### Fixed +* Fixed light wallet (wallet only) syncing issues introduced in 2.4.0 + +### Known Issues +* A breaking backwards compatibility issue was introduced in 2.4.0 in the daemon RPC call `add_private_key`. We expect to resolve this in a future release. + +### Deprecated +macOS 11 (Big Sur) is deprecated. 
This release (2.4.1) will be the last release to support macOS 11 + ## 2.4.0 Chia blockchain 2024-06-20 ## What's Changed From a447734728fb5a413a2d1cdf2901bf65d36b4d3c Mon Sep 17 00:00:00 2001 From: Jack Nelson Date: Mon, 24 Jun 2024 19:52:40 -0400 Subject: [PATCH 25/77] Refactor CLI by adding add new click types for fees, amounts, addresses and bytes32 (#15718) --- chia/_tests/cmds/test_click_types.py | 193 ++++++ chia/_tests/cmds/test_tx_config_args.py | 18 +- chia/_tests/cmds/wallet/test_coins.py | 5 +- chia/_tests/cmds/wallet/test_wallet.py | 28 +- chia/_tests/core/data_layer/test_data_rpc.py | 14 +- chia/_tests/pools/test_pool_cmdline.py | 18 +- chia/cmds/cmds_util.py | 35 +- chia/cmds/coin_funcs.py | 58 +- chia/cmds/coins.py | 84 +-- chia/cmds/dao.py | 672 +++++++++---------- chia/cmds/dao_funcs.py | 355 ++++------ chia/cmds/data.py | 121 ++-- chia/cmds/data_funcs.py | 135 ++-- chia/cmds/options.py | 14 + chia/cmds/param_types.py | 221 ++++++ chia/cmds/plotnft.py | 86 +-- chia/cmds/plotnft_funcs.py | 37 +- chia/cmds/wallet.py | 342 +++++----- chia/cmds/wallet_funcs.py | 297 ++++---- chia/rpc/wallet_rpc_client.py | 4 +- chia/wallet/trading/offer.py | 8 +- 21 files changed, 1438 insertions(+), 1307 deletions(-) create mode 100644 chia/_tests/cmds/test_click_types.py create mode 100644 chia/cmds/param_types.py diff --git a/chia/_tests/cmds/test_click_types.py b/chia/_tests/cmds/test_click_types.py new file mode 100644 index 000000000000..45184132c443 --- /dev/null +++ b/chia/_tests/cmds/test_click_types.py @@ -0,0 +1,193 @@ +from __future__ import annotations + +from decimal import Decimal +from pathlib import Path +from typing import Any, cast + +import pytest +from click import BadParameter, Context + +from chia.cmds.param_types import ( + AddressParamType, + AmountParamType, + Bytes32ParamType, + CliAddress, + CliAmount, + TransactionFeeParamType, + Uint64ParamType, +) +from chia.cmds.units import units +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.bech32m import encode_puzzle_hash +from chia.util.ints import uint64 +from chia.wallet.util.address_type import AddressType + +""" +This File tests all of the custom click param types. +Click automatically handles all cases where it is None and all cases where it is in some sort of Iterable. 
+""" + +burn_ph = bytes32.from_hexstr("0x000000000000000000000000000000000000000000000000000000000000dead") +burn_address = encode_puzzle_hash(burn_ph, "xch") +burn_address_txch = encode_puzzle_hash(burn_ph, "txch") +burn_nft_addr = encode_puzzle_hash(burn_ph, "did:chia:") +burn_bad_prefix = encode_puzzle_hash(burn_ph, "badprefix") +overflow_ammt = 18446744073709551616 # max coin + 1 +overflow_decimal_str = "18446744.073709551616" +overflow_decimal = Decimal(overflow_decimal_str) + + +class FakeContext: + obj: dict[Any, Any] = {} + + def __init__(self, obj: dict[Any, Any]): + self.obj = obj + + +def test_click_tx_fee_type() -> None: + # Test uint64 (only used as default) + # assert TransactionFeeParamType().convert(uint64(10000), None, None) == uint64(10000) + + # TODO: Test MOJO Logic When Implemented + + # Test Decimal / XCH + assert TransactionFeeParamType().convert("0.5", None, None) == uint64(Decimal("0.5") * units["chia"]) + assert TransactionFeeParamType().convert("0.000000000001", None, None) == uint64(1) + assert TransactionFeeParamType().convert("0", None, None) == uint64(0) + # Test Decimal Failures + with pytest.raises(BadParameter): + TransactionFeeParamType().convert("test", None, None) + with pytest.raises(BadParameter): + TransactionFeeParamType().convert("0.6", None, None) + with pytest.raises(BadParameter): + TransactionFeeParamType().convert("0.0000000000001", None, None) # 0.1 mojos + with pytest.raises(BadParameter): + TransactionFeeParamType().convert("-0.6", None, None) + with pytest.raises(BadParameter): + TransactionFeeParamType().convert(overflow_decimal_str, None, None) + # Test Type Failures + with pytest.raises(BadParameter): + TransactionFeeParamType().convert(float(0.01), None, None) + + +def test_click_amount_type() -> None: + decimal_cli_amount = CliAmount(mojos=False, amount=Decimal("5.25")) + large_decimal_amount = CliAmount(mojos=False, amount=overflow_decimal) + mojos_cli_amount = CliAmount(mojos=True, amount=uint64(100000)) + one_mojo_cli_amount = CliAmount(mojos=False, amount=Decimal("0.000000000001")) + # Test CliAmount (Generally is not used) + assert AmountParamType().convert(decimal_cli_amount, None, None) == decimal_cli_amount + + # Test uint64 (only usable as default) + # assert AmountParamType().convert(uint64(100000), None, None) == mojos_cli_amount + + # TODO: Test MOJO Logic When Implemented + + # Test Decimal / XCH (we don't test overflow because we don't know the conversion ratio yet) + assert AmountParamType().convert("5.25", None, None) == decimal_cli_amount + assert AmountParamType().convert(overflow_decimal_str, None, None) == large_decimal_amount + assert AmountParamType().convert("0.000000000001", None, None) == one_mojo_cli_amount + # Test Decimal Failures + with pytest.raises(BadParameter): + AmountParamType().convert("test", None, None) + with pytest.raises(BadParameter): + AmountParamType().convert("0.0000000000001", None, None) # 0.1 mojos + with pytest.raises(BadParameter): + AmountParamType().convert("-999999", None, None) + with pytest.raises(BadParameter): + AmountParamType().convert("-0.6", None, None) + # Test Type Failures + with pytest.raises(BadParameter): + AmountParamType().convert(0.01, None, None) + + # Test CliAmount Class + assert decimal_cli_amount.convert_amount(units["chia"]) == uint64(Decimal("5.25") * units["chia"]) + assert mojos_cli_amount.convert_amount(units["chia"]) == uint64(100000) + assert one_mojo_cli_amount.convert_amount(units["chia"]) == uint64(1) + with pytest.raises(ValueError): # incorrect arg 
+ CliAmount(mojos=True, amount=Decimal("5.25")).convert_amount(units["chia"]) + with pytest.raises(ValueError): # incorrect arg + CliAmount(mojos=False, amount=uint64(100000)).convert_amount(units["chia"]) + with pytest.raises(ValueError): # overflow + large_decimal_amount.convert_amount(units["chia"]) + + +def test_click_address_type() -> None: + context = cast(Context, FakeContext(obj={"expected_prefix": "xch"})) # this makes us not have to use a config file + std_cli_address = CliAddress(burn_ph, burn_address, AddressType.XCH) + nft_cli_address = CliAddress(burn_ph, burn_nft_addr, AddressType.DID) + # Test CliAddress (Generally is not used) + # assert AddressParamType().convert(std_cli_address, None, context) == std_cli_address + + # test address parsing + assert AddressParamType().convert(burn_address, None, context) == std_cli_address + assert AddressParamType().convert(burn_nft_addr, None, context) == nft_cli_address + + # check address type validation + assert std_cli_address.validate_address_type(AddressType.XCH) == burn_address + assert std_cli_address.validate_address_type_get_ph(AddressType.XCH) == burn_ph + assert nft_cli_address.validate_address_type(AddressType.DID) == burn_nft_addr + assert nft_cli_address.validate_address_type_get_ph(AddressType.DID) == burn_ph + # check error handling + with pytest.raises(BadParameter): + AddressParamType().convert("test", None, None) + with pytest.raises(AttributeError): # attribute error because the context does not have a real error handler + AddressParamType().convert(burn_address_txch, None, context) + with pytest.raises(BadParameter): + AddressParamType().convert(burn_bad_prefix, None, None) + # Test Type Failures + with pytest.raises(BadParameter): + AddressParamType().convert(float(0.01), None, None) + + # check class error handling + with pytest.raises(ValueError): + std_cli_address.validate_address_type_get_ph(AddressType.DID) + with pytest.raises(ValueError): + std_cli_address.validate_address_type(AddressType.DID) + + +def test_click_address_type_config(root_path_populated_with_config: Path) -> None: + # set a root path in context. 
+ context = cast(Context, FakeContext(obj={"root_path": root_path_populated_with_config})) + # run test that should pass + assert AddressParamType().convert(burn_address, None, context) == CliAddress(burn_ph, burn_address, AddressType.XCH) + assert context.obj["expected_prefix"] == "xch" # validate that the prefix was set correctly + # use txch address + with pytest.raises(AttributeError): # attribute error because the context does not have a real error handler + AddressParamType().convert(burn_address_txch, None, context) + + +def test_click_bytes32_type() -> None: + # Test bytes32 (Generally it is not used) + # assert Bytes32ParamType().convert(burn_ph, None, None) == burn_ph + + # test bytes32 parsing + assert Bytes32ParamType().convert("0x" + burn_ph.hex(), None, None) == burn_ph + # check error handling + with pytest.raises(BadParameter): + Bytes32ParamType().convert("test", None, None) + # Test Type Failures + with pytest.raises(BadParameter): + Bytes32ParamType().convert(float(0.01), None, None) + + +def test_click_uint64_type() -> None: + # Test uint64 (only used as default) + assert Uint64ParamType().convert(uint64(10000), None, None) == uint64(10000) + + # Test Uint64 Parsing + assert Uint64ParamType().convert("5", None, None) == uint64(5) + assert Uint64ParamType().convert("10000000000000", None, None) == uint64(10000000000000) + assert Uint64ParamType().convert("0", None, None) == uint64(0) + # Test Failures + with pytest.raises(BadParameter): + Uint64ParamType().convert("test", None, None) + with pytest.raises(BadParameter): + Uint64ParamType().convert("0.1", None, None) + with pytest.raises(BadParameter): + Uint64ParamType().convert("-1", None, None) + with pytest.raises(BadParameter): + Uint64ParamType().convert(str(overflow_ammt), None, None) + # Test Type Failures + with pytest.raises(BadParameter): + Uint64ParamType().convert(float(0.01), None, None) diff --git a/chia/_tests/cmds/test_tx_config_args.py b/chia/_tests/cmds/test_tx_config_args.py index a782be0b0c9f..76837d2664a5 100644 --- a/chia/_tests/cmds/test_tx_config_args.py +++ b/chia/_tests/cmds/test_tx_config_args.py @@ -7,6 +7,8 @@ from click.testing import CliRunner from chia.cmds.cmds_util import CMDCoinSelectionConfigLoader, CMDTXConfigLoader, coin_selection_args, tx_config_args +from chia.cmds.param_types import CliAmount +from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.config import create_default_chia_config, load_config @@ -14,10 +16,10 @@ def test_coin_selection_args() -> None: @click.command() @coin_selection_args def test_cmd( - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], ) -> None: print( CMDCoinSelectionConfigLoader( @@ -95,10 +97,10 @@ def test_tx_config_args() -> None: @click.command() @tx_config_args def test_cmd( - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: print( diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index ad2c1ae35ea7..3fbc47f27482 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ 
b/chia/_tests/cmds/wallet/test_coins.py @@ -38,7 +38,10 @@ def test_coins_get_info(capsys: object, get_test_cli_clients: Tuple[TestRpcClien ( 1, CoinSelectionConfig( - min_coin_amount=uint64(0), max_coin_amount=uint64(0), excluded_coin_amounts=[], excluded_coin_ids=[] + min_coin_amount=uint64(0), + max_coin_amount=DEFAULT_TX_CONFIG.max_coin_amount, + excluded_coin_amounts=[], + excluded_coin_ids=[], ), ) ], diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 2fbae9253c10..eaf58f957394 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -374,7 +374,7 @@ async def cat_spend( "wallet", "send", "-a1", - "-m1", + "-m0.5", "-o", WALLET_ID_ARG, f"-e{bytes32_hexstr}", @@ -410,7 +410,7 @@ async def cat_spend( excluded_coin_ids=[bytes32([98] * 32)], reuse_puzhash=True, ), - 1000000000000, + 500000000000, ["0x6262626262626262626262626262626262626262626262626262626262626262"], [{"decorator": "CLAWBACK", "clawback_timelock": 60}], ) @@ -427,7 +427,7 @@ async def cat_spend( ), 1000, "xch1qvpsxqcrqvpsxqcrqvpsxqcrqvpsxqcrqvpsxqcrqvpsxqcrqvps82kgr2", - 1000000000000, + 500000000000, ["0x6262626262626262626262626262626262626262626262626262626262626262"], None, None, @@ -494,14 +494,14 @@ async def spend_clawback_coins( "clawback", WALLET_ID_ARG, FINGERPRINT_ARG, - "-m1", + "-m0.5", "--tx_ids", f"{tx_ids[0].hex()},{tx_ids[1].hex()}, {tx_ids[2].hex()}", ] run_cli_command_and_assert(capsys, root_dir, command_args, ["transaction_ids", str(r_tx_ids_hex)]) # these are various things that should be in the output expected_calls: logType = { - "spend_clawback_coins": [(tx_ids, 1000000000000, False)], + "spend_clawback_coins": [(tx_ids, 500000000000, False)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -644,7 +644,7 @@ def test_make_offer_bad_filename( FINGERPRINT_ARG, f"-p{str(tmp_path)}", "--reuse", - "-m1", + "-m0.5", "--offer", "1:10", "--offer", @@ -664,7 +664,7 @@ def test_make_offer_bad_filename( FINGERPRINT_ARG, f"-p{str(test_file)}", "--reuse", - "-m1", + "-m0.5", "--offer", "1:10", "--offer", @@ -735,7 +735,7 @@ def to_bech32(self) -> str: FINGERPRINT_ARG, f"-p{str(tmp_path / 'test.offer')}", "--reuse", - "-m1", + "-m0.5", "--offer", "1:10", "--offer", @@ -749,7 +749,7 @@ def to_bech32(self) -> str: "OFFERING:\n - 10 XCH (10000000000000 mojos)\n - 100 test3 (100000 mojos)", "REQUESTING:\n - 10 test2 (10000 mojos)\n" " - 1 nft1qgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyql4ft (1 mojos)", - "Including Fees: 1 XCH, 1000000000000 mojos", + "Including Fees: 0.5 XCH, 500000000000 mojos", "Created offer with ID 0202020202020202020202020202020202020202020202020202020202020202", ] run_cli_command_and_assert(capsys, root_dir, command_args[:-4], ["without --override"]) @@ -802,7 +802,7 @@ def to_bech32(self) -> str: } }, None, - 1000000000000, + 500000000000, False, ) ], @@ -961,7 +961,7 @@ async def take_offer( ] with importlib_resources.as_file(test_offer_file_path) as test_offer_file_name: - command_args = ["wallet", "take_offer", os.fspath(test_offer_file_name), FINGERPRINT_ARG, "-m1", "--reuse"] + command_args = ["wallet", "take_offer", os.fspath(test_offer_file_name), FINGERPRINT_ARG, "-m0.5", "--reuse"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { @@ -970,7 +970,7 @@ async def take_offer( (cat2,), (bytes32.from_hexstr("accce8e1c71b56624f2ecaeff5af57eac41365080449904d0717bd333c04806d"),), ], - "take_offer": 
[(Offer.from_bech32(test_offer_file_bech32), DEFAULT_TX_CONFIG, None, 1000000000000)], + "take_offer": [(Offer.from_bech32(test_offer_file_bech32), DEFAULT_TX_CONFIG, None, 500000000000)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -1006,7 +1006,7 @@ async def cancel_offer( inst_rpc_client = CancelOfferRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client - command_args = ["wallet", "cancel_offer", FINGERPRINT_ARG, "-m1", "--id", test_offer_id] + command_args = ["wallet", "cancel_offer", FINGERPRINT_ARG, "-m0.5", "--id", test_offer_id] # these are various things that should be in the output cat1 = bytes32.from_hexstr("fd6a341ed39c05c31157d5bfea395a0e142398ced24deea1e82f836d7ec2909c") cat2 = bytes32.from_hexstr("dc59bcd60ce5fc9c93a5d3b11875486b03efb53a53da61e453f5cf61a7746860") @@ -1022,7 +1022,7 @@ async def cancel_offer( run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "get_offer": [(test_offer_id_bytes, True)], - "cancel_offer": [(test_offer_id_bytes, DEFAULT_TX_CONFIG, 1000000000000, True)], + "cancel_offer": [(test_offer_id_bytes, DEFAULT_TX_CONFIG, 500000000000, True)], "cat_asset_id_to_name": [ (cat1,), (cat2,), diff --git a/chia/_tests/core/data_layer/test_data_rpc.py b/chia/_tests/core/data_layer/test_data_rpc.py index 49d4984ae306..8cad29a9a766 100644 --- a/chia/_tests/core/data_layer/test_data_rpc.py +++ b/chia/_tests/core/data_layer/test_data_rpc.py @@ -2994,7 +2994,7 @@ async def test_pagination_cmds( if layer == InterfaceLayer.funcs: keys = await get_keys_cmd( rpc_port=rpc_port, - store_id="0x" + store_id.hex(), + store_id=store_id, root_hash=None, fingerprint=None, page=0, @@ -3003,7 +3003,7 @@ async def test_pagination_cmds( ) keys_values = await get_keys_values_cmd( rpc_port=rpc_port, - store_id="0x" + store_id.hex(), + store_id=store_id, root_hash=None, fingerprint=None, page=0, @@ -3012,9 +3012,9 @@ async def test_pagination_cmds( ) kv_diff = await get_kv_diff_cmd( rpc_port=rpc_port, - store_id="0x" + store_id.hex(), - hash_1="0x" + hash_1.hex(), - hash_2="0x" + hash_2.hex(), + store_id=store_id, + hash_1=hash_1, + hash_2=hash_2, fingerprint=None, page=0, max_page_size=max_page_size, @@ -3222,7 +3222,7 @@ async def test_unsubmitted_batch_update( elif layer == InterfaceLayer.funcs: res = await update_data_store_cmd( rpc_port=rpc_port, - store_id="0x" + store_id.hex(), + store_id=store_id, changelist=changelist, fee=None, fingerprint=None, @@ -3369,7 +3369,7 @@ async def test_unsubmitted_batch_update( update_tx_rec1 = res["tx_id"] elif layer == InterfaceLayer.funcs: res = await submit_pending_root_cmd( - store_id="0x" + store_id.hex(), + store_id=store_id, fee=None, fingerprint=None, rpc_port=rpc_port, diff --git a/chia/_tests/pools/test_pool_cmdline.py b/chia/_tests/pools/test_pool_cmdline.py index 6f07963ddca0..2ab6e363f06f 100644 --- a/chia/_tests/pools/test_pool_cmdline.py +++ b/chia/_tests/pools/test_pool_cmdline.py @@ -6,28 +6,12 @@ import pytest from click.testing import CliRunner, Result -from chia.cmds.plotnft import create_cmd, show_cmd, validate_fee +from chia.cmds.plotnft import create_cmd, show_cmd pytestmark = pytest.mark.skip("TODO: Works locally but fails on CI, needs to be fixed!") class TestPoolNFTCommands: - def test_validate_fee(self): - with pytest.raises(click.exceptions.BadParameter): - r = validate_fee(None, "fee", "1.0") - - with pytest.raises(click.exceptions.BadParameter): - r = validate_fee(None, "fee", "-1") - - r = 
validate_fee(None, "fee", "0") - assert r == "0" - - r = validate_fee(None, "fee", "0.000000000001") - assert r == "0.000000000001" - - r = validate_fee(None, "fee", "0.5") - assert r == "0.5" - def test_plotnft_show(self): runner = CliRunner() result = runner.invoke(show_cmd, [], catch_exceptions=False) diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index a070177b9170..3312d020e32d 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -4,13 +4,13 @@ import logging import traceback from contextlib import asynccontextmanager -from decimal import Decimal from pathlib import Path from typing import Any, AsyncIterator, Callable, Dict, List, Optional, Tuple, Type, TypeVar import click from aiohttp import ClientConnectorCertificateError, ClientConnectorError +from chia.cmds.param_types import AmountParamType, Bytes32ParamType, CliAmount, cli_amount_none from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.daemon.keychain_proxy import KeychainProxy, connect_to_keychain_and_validate from chia.rpc.data_layer_rpc_client import DataLayerRpcClient @@ -25,9 +25,8 @@ from chia.util.config import load_config from chia.util.default_root import DEFAULT_ROOT_PATH from chia.util.errors import CliRpcConnectionError, InvalidPathError -from chia.util.ints import uint16, uint64 +from chia.util.ints import uint16 from chia.util.keychain import KeyData -from chia.util.streamable import Streamable, streamable from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.tx_config import CoinSelectionConfig, CoinSelectionConfigLoader, TXConfig, TXConfigLoader @@ -269,29 +268,31 @@ def coin_selection_args(func: Callable[..., None]) -> Callable[..., None]: "--min-coin-amount", "--min-amount", help="Ignore coins worth less then this much XCH or CAT units", - type=str, + type=AmountParamType(), required=False, - default=None, + default=cli_amount_none, )( click.option( "-l", "--max-coin-amount", "--max-amount", help="Ignore coins worth more then this much XCH or CAT units", - type=str, + type=AmountParamType(), required=False, - default=None, + default=cli_amount_none, )( click.option( "--exclude-coin", "coins_to_exclude", multiple=True, + type=Bytes32ParamType(), help="Exclude this coin from being spent.", )( click.option( "--exclude-amount", "amounts_to_exclude", multiple=True, + type=AmountParamType(), help="Exclude any coins with this XCH or CAT amount from being included.", )(func) ) @@ -327,28 +328,26 @@ def timelock_args(func: Callable[..., None]) -> Callable[..., None]: ) -@streamable @dataclasses.dataclass(frozen=True) -class CMDCoinSelectionConfigLoader(Streamable): - min_coin_amount: Optional[str] = None - max_coin_amount: Optional[str] = None - excluded_coin_amounts: Optional[List[str]] = None - excluded_coin_ids: Optional[List[str]] = None +class CMDCoinSelectionConfigLoader: + min_coin_amount: CliAmount = cli_amount_none + max_coin_amount: CliAmount = cli_amount_none + excluded_coin_amounts: Optional[List[CliAmount]] = None + excluded_coin_ids: Optional[List[bytes32]] = None def to_coin_selection_config(self, mojo_per_unit: int) -> CoinSelectionConfig: return CoinSelectionConfigLoader( - uint64(int(Decimal(self.min_coin_amount) * mojo_per_unit)) if self.min_coin_amount is not None else None, - uint64(int(Decimal(self.max_coin_amount) * mojo_per_unit)) if self.max_coin_amount is not None else None, + self.min_coin_amount.convert_amount_with_default(mojo_per_unit, None), + self.max_coin_amount.convert_amount_with_default(mojo_per_unit, 
None), ( - [uint64(int(Decimal(a) * mojo_per_unit)) for a in self.excluded_coin_amounts] + [cli_amount.convert_amount(mojo_per_unit) for cli_amount in self.excluded_coin_amounts] if self.excluded_coin_amounts is not None else None ), - [bytes32.from_hexstr(id) for id in self.excluded_coin_ids] if self.excluded_coin_ids is not None else None, + self.excluded_coin_ids, ).autofill(constants=DEFAULT_CONSTANTS) -@streamable @dataclasses.dataclass(frozen=True) class CMDTXConfigLoader(CMDCoinSelectionConfigLoader): reuse_puzhash: Optional[bool] = None diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index 5b0ca8f5ea90..bb415ae4b731 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -1,11 +1,10 @@ from __future__ import annotations import sys -from decimal import Decimal from typing import Dict, List, Optional, Sequence, Tuple, Union from chia.cmds.cmds_util import CMDCoinSelectionConfigLoader, CMDTXConfigLoader, cli_confirm, get_wallet_client -from chia.cmds.units import units +from chia.cmds.param_types import CliAmount from chia.cmds.wallet_funcs import get_mojo_per_unit, get_wallet_type, print_balance from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -22,10 +21,10 @@ async def async_list( wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, - max_coin_amount: str, - min_coin_amount: str, - excluded_amounts: Sequence[str], - excluded_coin_ids: Sequence[str], + max_coin_amount: CliAmount, + min_coin_amount: CliAmount, + excluded_amounts: Sequence[CliAmount], + excluded_coin_ids: Sequence[bytes32], show_unconfirmed: bool, paginate: Optional[bool], ) -> None: @@ -104,9 +103,9 @@ def print_coins( print("Press q to quit, or c to continue") while True: entered_key = sys.stdin.read(1) - if entered_key == "q": + if entered_key.lower() == "q": return None - elif entered_key == "c": + elif entered_key.lower() == "c": break @@ -115,19 +114,16 @@ async def async_combine( wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, - fee: Decimal, - max_coin_amount: str, - min_coin_amount: str, - excluded_amounts: Sequence[str], - # TODO: [add TXConfig args] add excluded_coin_ids + fee: uint64, + max_coin_amount: CliAmount, + min_coin_amount: CliAmount, + excluded_amounts: Sequence[CliAmount], number_of_coins: int, - target_coin_amount: Decimal, - target_coin_ids_str: Sequence[str], + target_coin_amount: CliAmount, + target_coin_ids: Sequence[bytes32], largest_first: bool, ) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): - target_coin_ids: List[bytes32] = [bytes32.from_hexstr(coin_id) for coin_id in target_coin_ids_str] - final_fee = uint64(int(fee * units["chia"])) if number_of_coins > 500: raise ValueError(f"{number_of_coins} coins is greater then the maximum limit of 500 coins.") try: @@ -140,19 +136,18 @@ async def async_combine( print("Wallet not synced. Please wait.") return is_xch: bool = wallet_type == WalletType.STANDARD_WALLET # this lets us know if we are directly combining Chia - tx_config = CMDTXConfigLoader( max_coin_amount=max_coin_amount, min_coin_amount=min_coin_amount, - excluded_coin_amounts=[*excluded_amounts, str(target_coin_amount)], # dont reuse coins of same amount. + excluded_coin_amounts=[*excluded_amounts, target_coin_amount], # dont reuse coins of same amount. 
# TODO: [add TXConfig args] add excluded_coin_ids ).to_tx_config(mojo_per_unit, config, fingerprint) - final_target_coin_amount = uint64(int(target_coin_amount * mojo_per_unit)) + final_target_coin_amount = target_coin_amount.convert_amount(mojo_per_unit) if final_target_coin_amount != 0: # if we have a set target, just use standard coin selection. removals: List[Coin] = await wallet_client.select_coins( - amount=(final_target_coin_amount + final_fee) if is_xch else final_target_coin_amount, + amount=(final_target_coin_amount + fee) if is_xch else final_target_coin_amount, wallet_id=wallet_id, coin_selection_config=tx_config.coin_selection_config, ) @@ -179,13 +174,13 @@ async def async_combine( print(f"Combining {len(removals)} coins.") cli_confirm("Would you like to Continue? (y/n): ") total_amount: uint128 = uint128(sum(coin.amount for coin in removals)) - if is_xch and total_amount - final_fee <= 0: + if is_xch and total_amount - fee <= 0: print("Total amount is less than 0 after fee, exiting.") return target_ph: bytes32 = decode_puzzle_hash(await wallet_client.get_next_address(wallet_id, False)) - additions = [{"amount": (total_amount - final_fee) if is_xch else total_amount, "puzzle_hash": target_ph}] + additions = [{"amount": (total_amount - fee) if is_xch else total_amount, "puzzle_hash": target_ph}] transaction: TransactionRecord = await wallet_client.send_transaction_multi( - wallet_id, additions, tx_config, removals, final_fee + wallet_id, additions, tx_config, removals, fee ) tx_id = transaction.name.hex() print(f"Transaction sent: {tx_id}") @@ -197,14 +192,13 @@ async def async_split( wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, - fee: Decimal, + fee: uint64, number_of_coins: int, - amount_per_coin: Decimal, + amount_per_coin: CliAmount, target_coin_id_str: str, # TODO: [add TXConfig args] ) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): - final_fee = uint64(int(fee * units["chia"])) target_coin_id: bytes32 = bytes32.from_hexstr(target_coin_id_str) if number_of_coins > 500: print(f"{number_of_coins} coins is greater then the maximum limit of 500 coins.") @@ -219,10 +213,10 @@ async def async_split( print("Wallet not synced. Please wait.") return is_xch: bool = wallet_type == WalletType.STANDARD_WALLET # this lets us know if we are directly spitting Chia - final_amount_per_coin = uint64(int(amount_per_coin * mojo_per_unit)) + final_amount_per_coin = amount_per_coin.convert_amount(mojo_per_unit) total_amount = final_amount_per_coin * number_of_coins if is_xch: - total_amount += final_fee + total_amount += fee # get full coin record from name, and validate information about it. 
removal_coin_record: CoinRecord = (await wallet_client.get_coin_records_by_names([target_coin_id]))[0] if removal_coin_record.coin.amount < total_amount: @@ -243,7 +237,7 @@ async def async_split( ).to_tx_config(mojo_per_unit, config, fingerprint) transaction: TransactionRecord = await wallet_client.send_transaction_multi( - wallet_id, additions, tx_config, [removal_coin_record.coin], final_fee + wallet_id, additions, tx_config, [removal_coin_record.coin], fee ) tx_id = transaction.name.hex() print(f"Transaction sent: {tx_id}") @@ -252,8 +246,8 @@ async def async_split( spam_filter_after_n_txs = config.get("spam_filter_after_n_txs", 200) # how many txs to wait before filtering if final_amount_per_coin < dust_threshold and wallet_type == WalletType.STANDARD_WALLET: print( - f"WARNING: The amount per coin: {amount_per_coin} is less than the dust threshold: " - f"{dust_threshold / mojo_per_unit}. Some or all of the Coins " + f"WARNING: The amount per coin: {amount_per_coin.amount} is less than the dust threshold: " + f"{dust_threshold / (1 if amount_per_coin.mojos else mojo_per_unit)}. Some or all of the Coins " f"{'will' if number_of_coins > spam_filter_after_n_txs else 'may'} not show up in your wallet unless " f"you decrease the dust limit to below {final_amount_per_coin} mojos or disable it by setting it to 0." ) diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index d3d4f1e4d8dd..a91f60bf35d1 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -1,12 +1,14 @@ from __future__ import annotations import asyncio -from decimal import Decimal from typing import Optional, Sequence import click from chia.cmds import options +from chia.cmds.param_types import AmountParamType, Bytes32ParamType, CliAmount, cli_amount_none +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 @click.group("coins", help="Manage your wallets coins") @@ -29,25 +31,27 @@ def coins_cmd(ctx: click.Context) -> None: @click.option( "--min-amount", help="Ignore coins worth less then this much XCH or CAT units", - type=str, - default="0", + type=AmountParamType(), + default=cli_amount_none, ) @click.option( "--max-amount", help="Ignore coins worth more then this much XCH or CAT units", - type=str, - default="0", + type=AmountParamType(), + default=cli_amount_none, ) @click.option( "--exclude-coin", "coins_to_exclude", multiple=True, help="prevent this coin from being included.", + type=Bytes32ParamType(), ) @click.option( "--exclude-amount", "amounts_to_exclude", multiple=True, + type=AmountParamType(), help="Exclude any coins with this XCH or CAT amount from being included.", ) @click.option( @@ -62,10 +66,10 @@ def list_cmd( fingerprint: int, id: int, show_unconfirmed: bool, - min_amount: str, - max_amount: str, - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + min_amount: CliAmount, + max_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], paginate: Optional[bool], ) -> None: from .coin_funcs import async_list @@ -100,19 +104,20 @@ def list_cmd( "--target-amount", help="Select coins until this amount (in XCH or CAT) is reached. 
\ Combine all selected coins into one coin, which will have a value of at least target-amount", - type=str, - default="0", + type=AmountParamType(), + default=CliAmount(mojos=True, amount=uint64(0)), ) @click.option( "--min-amount", help="Ignore coins worth less then this much XCH or CAT units", - type=str, - default="0", + type=AmountParamType(), + default=cli_amount_none, ) @click.option( "--exclude-amount", "amounts_to_exclude", multiple=True, + type=AmountParamType(), help="Exclude any coins with this XCH or CAT amount from being included.", ) @click.option( @@ -126,23 +131,16 @@ def list_cmd( @click.option( "--max-amount", help="Ignore coins worth more then this much XCH or CAT units", - type=str, - default="0", # 0 means no limit -) -@click.option( - "-m", - "--fee", - help="Set the fees for the transaction, in XCH", - type=str, - default="0", - show_default=True, - required=True, + type=AmountParamType(), + default=cli_amount_none, ) +@options.create_fee() @click.option( "--input-coin", "input_coins", multiple=True, help="Only combine coins with these ids.", + type=Bytes32ParamType(), ) @click.option( "--largest-first/--smallest-first", @@ -154,13 +152,13 @@ def combine_cmd( wallet_rpc_port: Optional[int], fingerprint: int, id: int, - target_amount: str, - min_amount: str, - amounts_to_exclude: Sequence[str], + target_amount: CliAmount, + min_amount: CliAmount, + amounts_to_exclude: Sequence[CliAmount], number_of_coins: int, - max_amount: str, - fee: str, - input_coins: Sequence[str], + max_amount: CliAmount, + fee: uint64, + input_coins: Sequence[bytes32], largest_first: bool, ) -> None: from .coin_funcs import async_combine @@ -170,13 +168,13 @@ def combine_cmd( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, wallet_id=id, - fee=Decimal(fee), + fee=fee, max_coin_amount=max_amount, min_coin_amount=min_amount, excluded_amounts=amounts_to_exclude, number_of_coins=number_of_coins, - target_coin_amount=Decimal(target_amount), - target_coin_ids_str=input_coins, + target_coin_amount=target_amount, + target_coin_ids=input_coins, largest_first=largest_first, ) ) @@ -199,20 +197,12 @@ def combine_cmd( help="The number of coins we are creating.", required=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees for the transaction, in XCH", - type=str, - default="0", - show_default=True, - required=True, -) +@options.create_fee() @click.option( "-a", "--amount-per-coin", help="The amount of each newly created coin, in XCH", - type=str, + type=AmountParamType(), required=True, ) @click.option("-t", "--target-coin-id", type=str, required=True, help="The coin id of the coin we are splitting.") @@ -221,8 +211,8 @@ def split_cmd( fingerprint: int, id: int, number_of_coins: int, - fee: str, - amount_per_coin: str, + fee: uint64, + amount_per_coin: CliAmount, target_coin_id: str, ) -> None: from .coin_funcs import async_split @@ -232,9 +222,9 @@ def split_cmd( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, wallet_id=id, - fee=Decimal(fee), + fee=fee, number_of_coins=number_of_coins, - amount_per_coin=Decimal(amount_per_coin), + amount_per_coin=amount_per_coin, target_coin_id_str=target_coin_id, ) ) diff --git a/chia/cmds/dao.py b/chia/cmds/dao.py index b54d488be2d4..4e5483c7e1a4 100644 --- a/chia/cmds/dao.py +++ b/chia/cmds/dao.py @@ -5,8 +5,12 @@ import click -from chia.cmds.cmds_util import tx_config_args -from chia.cmds.plotnft import validate_fee +from chia.cmds import options +from chia.cmds.cmds_util import CMDTXConfigLoader, tx_config_args +from 
chia.cmds.param_types import AmountParamType, Bytes32ParamType, CliAmount, TransactionFeeParamType, Uint64ParamType +from chia.cmds.units import units +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 @click.group("dao", short_help="Create, manage or show state of DAOs", no_args_is_help=True) @@ -33,33 +37,27 @@ def dao_cmd(ctx: click.Context) -> None: "-t", "--treasury-id", help="The Treasury ID of the DAO you want to track", - type=str, + type=Bytes32ParamType(), required=True, ) @click.option( "-a", "--filter-amount", help="The minimum number of votes a proposal needs before the wallet will recognise it", - type=int, - default=1, + type=Uint64ParamType(), + default=uint64(1), show_default=True, ) def dao_add_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - treasury_id: str, - filter_amount: int, + treasury_id: bytes32, + filter_amount: uint64, name: Optional[str], ) -> None: from .dao_funcs import add_dao_wallet - extra_params = { - "name": name, - "treasury_id": treasury_id, - "filter_amount": filter_amount, - } - - asyncio.run(add_dao_wallet(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(add_dao_wallet(wallet_rpc_port, fingerprint, name, treasury_id, filter_amount)) # ---------------------------------------------------------------------------------------- @@ -79,101 +77,92 @@ def dao_add_cmd( @click.option( "--proposal-timelock", help="The minimum number of blocks before a proposal can close", - type=int, - default=1000, + type=Uint64ParamType(), + default="1000", show_default=True, ) @click.option( "--soft-close", help="The number of blocks a proposal must remain unspent before closing", - type=int, - default=20, + type=Uint64ParamType(), + default="20", show_default=True, ) @click.option( "--attendance-required", help="The minimum number of votes a proposal must receive to be accepted", - type=int, + type=Uint64ParamType(), required=True, ) @click.option( "--pass-percentage", help="The percentage of 'yes' votes in basis points a proposal must receive to be accepted. 
100% = 10000", - type=int, - default=5000, + type=Uint64ParamType(), + default="5000", show_default=True, ) @click.option( "--self-destruct", help="The number of blocks required before a proposal can be automatically removed", - type=int, - default=10000, + type=Uint64ParamType(), + default="10000", show_default=True, ) @click.option( "--oracle-delay", help="The number of blocks required between oracle spends of the treasury", - type=int, - default=50, + type=Uint64ParamType(), + default="50", show_default=True, ) @click.option( "--proposal-minimum", help="The minimum amount (in xch) that a proposal must use to be created", - type=str, - default="0.000000000001", + type=AmountParamType(), + default="1", show_default=True, ) @click.option( "--filter-amount", help="The minimum number of votes a proposal needs before the wallet will recognise it", - type=int, - default=1, + type=Uint64ParamType(), + default="1", show_default=True, ) @click.option( "--cat-amount", help="The number of DAO CATs (in mojos) to create when initializing the DAO", - type=int, + type=AmountParamType(), required=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "--fee-for-cat", help="Set the fees for the CAT creation transaction, in XCH.", - type=str, + type=TransactionFeeParamType(), default="0", show_default=True, - callback=validate_fee, ) @tx_config_args def dao_create_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - proposal_timelock: int, - soft_close: int, - attendance_required: int, - pass_percentage: int, - self_destruct: int, - oracle_delay: int, - proposal_minimum: str, - filter_amount: int, - cat_amount: int, + proposal_timelock: uint64, + soft_close: uint64, + attendance_required: uint64, + pass_percentage: uint64, + self_destruct: uint64, + oracle_delay: uint64, + proposal_minimum: CliAmount, + filter_amount: uint64, + cat_amount: CliAmount, name: Optional[str], - fee: str, - fee_for_cat: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + fee_for_cat: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import create_dao_wallet @@ -183,26 +172,31 @@ def dao_create_cmd( print("Creating new DAO") - extra_params = { - "fee": fee, - "fee_for_cat": fee_for_cat, - "name": name, - "proposal_timelock": proposal_timelock, - "soft_close_length": soft_close, - "attendance_required": attendance_required, - "pass_percentage": pass_percentage, - "self_destruct_length": self_destruct, - "oracle_spend_delay": oracle_delay, - "proposal_minimum_amount": proposal_minimum, - "filter_amount": filter_amount, - "amount_of_cats": cat_amount, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(create_dao_wallet(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + create_dao_wallet( + wallet_rpc_port, + fingerprint, + fee, + fee_for_cat, + name, + proposal_timelock, + soft_close, + attendance_required, + pass_percentage, + self_destruct, + oracle_delay, + proposal_minimum.convert_amount(units["chia"]), + filter_amount, + cat_amount, + CMDTXConfigLoader( + 
min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) # ---------------------------------------------------------------------------------------- @@ -226,10 +220,7 @@ def dao_get_id_cmd( ) -> None: from .dao_funcs import get_treasury_id - extra_params = { - "wallet_id": wallet_id, - } - asyncio.run(get_treasury_id(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(get_treasury_id(wallet_rpc_port, fingerprint, wallet_id)) @dao_cmd.command("add_funds", short_help="Send funds to a DAO treasury", no_args_is_help=True) @@ -253,46 +244,43 @@ def dao_get_id_cmd( "-a", "--amount", help="The amount of funds to send", - type=str, + type=AmountParamType(), required=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_add_funds_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, funding_wallet_id: int, - amount: str, - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + amount: CliAmount, + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import add_funds_to_treasury - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "funding_wallet_id": funding_wallet_id, - "amount": amount, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(add_funds_to_treasury(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + add_funds_to_treasury( + wallet_rpc_port, + fingerprint, + wallet_id, + funding_wallet_id, + amount, + fee, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) @dao_cmd.command("balance", short_help="Get the asset balances for a DAO treasury", no_args_is_help=True) @@ -312,10 +300,7 @@ def dao_get_balance_cmd( ) -> None: from .dao_funcs import get_treasury_balance - extra_params = { - "wallet_id": wallet_id, - } - asyncio.run(get_treasury_balance(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(get_treasury_balance(wallet_rpc_port, fingerprint, wallet_id)) @dao_cmd.command("rules", short_help="Get the current rules governing the DAO", no_args_is_help=True) @@ -335,10 +320,7 @@ def dao_rules_cmd( ) -> None: from .dao_funcs import get_rules - extra_params = { - "wallet_id": wallet_id, - } - asyncio.run(get_rules(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(get_rules(wallet_rpc_port, fingerprint, wallet_id)) # ---------------------------------------------------------------------------------------- @@ -372,11 +354,7 @@ def dao_list_proposals_cmd( if not include_closed: include_closed = False - extra_params = { - "wallet_id": wallet_id, - "include_closed": include_closed, - } - asyncio.run(list_proposals(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(list_proposals(wallet_rpc_port, fingerprint, wallet_id, include_closed)) @dao_cmd.command("show_proposal", 
short_help="Show the details of a specific proposal", no_args_is_help=True) @@ -404,11 +382,7 @@ def dao_show_proposal_cmd( ) -> None: from .dao_funcs import show_proposal - extra_params = { - "wallet_id": wallet_id, - "proposal_id": proposal_id, - } - asyncio.run(show_proposal(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run(show_proposal(wallet_rpc_port, fingerprint, wallet_id, proposal_id)) # ---------------------------------------------------------------------------------------- @@ -436,7 +410,7 @@ def dao_show_proposal_cmd( "-a", "--vote-amount", help="The number of votes you want to cast", - type=int, + type=Uint64ParamType(), required=True, ) @click.option( @@ -445,47 +419,44 @@ def dao_show_proposal_cmd( help="Use this option to vote against a proposal. If not present then the vote is for the proposal", is_flag=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_vote_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, proposal_id: str, - vote_amount: int, + vote_amount: uint64, vote_no: Optional[bool], - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import vote_on_proposal is_yes_vote = False if vote_no else True - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "proposal_id": proposal_id, - "vote_amount": vote_amount, - "is_yes_vote": is_yes_vote, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(vote_on_proposal(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + vote_on_proposal( + wallet_rpc_port, + fingerprint, + wallet_id, + proposal_id, + vote_amount, + is_yes_vote, + fee, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) # ---------------------------------------------------------------------------------------- @@ -516,15 +487,7 @@ def dao_vote_cmd( is_flag=True, default=False, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_close_proposal_cmd( wallet_rpc_port: Optional[int], @@ -532,27 +495,32 @@ def dao_close_proposal_cmd( wallet_id: int, proposal_id: str, self_destruct: bool, - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import close_proposal - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "proposal_id": proposal_id, - "self_destruct": self_destruct, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, 
- "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(close_proposal(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + close_proposal( + wallet_rpc_port, + fingerprint, + wallet_id, + fee, + proposal_id, + self_destruct, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) # ---------------------------------------------------------------------------------------- @@ -573,44 +541,41 @@ def dao_close_proposal_cmd( "-a", "--amount", help="The amount of CATs (not mojos) to lock in voting mode", - type=str, + type=AmountParamType(), required=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_lockup_coins_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, - amount: str, - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + amount: CliAmount, + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import lockup_coins - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "amount": amount, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(lockup_coins(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + lockup_coins( + wallet_rpc_port, + fingerprint, + wallet_id, + amount, + fee, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) @dao_cmd.command("release_coins", short_help="Release closed proposals from DAO CATs", no_args_is_help=True) @@ -623,39 +588,36 @@ def dao_lockup_coins_cmd( ) @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int) @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_release_coins_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import release_coins - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(release_coins(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + release_coins( + wallet_rpc_port, + fingerprint, + wallet_id, + 
fee, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) @dao_cmd.command("exit_lockup", short_help="Release DAO CATs from voting mode", no_args_is_help=True) @@ -668,39 +630,36 @@ def dao_release_coins_cmd( ) @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int) @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_exit_lockup_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import exit_lockup - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(exit_lockup(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + exit_lockup( + wallet_rpc_port, + fingerprint, + wallet_id, + fee, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) # ---------------------------------------------------------------------------------------- @@ -735,7 +694,7 @@ def dao_proposal(ctx: click.Context) -> None: "-a", "--amount", help="The amount of funds the proposal will send (in mojos)", - type=float, + type=str, required=False, default=None, ) @@ -762,49 +721,46 @@ def dao_proposal(ctx: click.Context) -> None: required=False, default=None, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_create_spend_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, + fee: uint64, vote_amount: Optional[int], to_address: Optional[str], amount: Optional[str], asset_id: Optional[str], from_json: Optional[str], - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import create_spend_proposal - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "vote_amount": vote_amount, - "to_address": to_address, - "amount": amount, - "asset_id": asset_id, - "from_json": from_json, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(create_spend_proposal(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + 
create_spend_proposal( + wallet_rpc_port, + fingerprint, + wallet_id, + fee, + vote_amount, + to_address, + amount, + asset_id, + from_json, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) @dao_proposal.command("update", short_help="Create a proposal to change the DAO rules", no_args_is_help=True) @@ -821,99 +777,96 @@ def dao_create_spend_proposal_cmd( "-v", "--vote-amount", help="The number of votes to add", - type=int, + type=Uint64ParamType(), required=False, default=None, ) @click.option( "--proposal-timelock", help="The new minimum number of blocks before a proposal can close", - type=int, + type=Uint64ParamType(), default=None, required=False, ) @click.option( "--soft-close", help="The number of blocks a proposal must remain unspent before closing", - type=int, + type=Uint64ParamType(), default=None, required=False, ) @click.option( "--attendance-required", help="The minimum number of votes a proposal must receive to be accepted", - type=int, + type=Uint64ParamType(), default=None, required=False, ) @click.option( "--pass-percentage", help="The percentage of 'yes' votes in basis points a proposal must receive to be accepted. 100% = 10000", - type=int, + type=Uint64ParamType(), default=None, required=False, ) @click.option( "--self-destruct", help="The number of blocks required before a proposal can be automatically removed", - type=int, + type=Uint64ParamType(), default=None, required=False, ) @click.option( "--oracle-delay", help="The number of blocks required between oracle spends of the treasury", - type=int, + type=Uint64ParamType(), default=None, required=False, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_create_update_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, - vote_amount: Optional[int], - proposal_timelock: Optional[int], - soft_close: Optional[int], - attendance_required: Optional[int], - pass_percentage: Optional[int], - self_destruct: Optional[int], - oracle_delay: Optional[int], - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + fee: uint64, + vote_amount: Optional[uint64], + proposal_timelock: Optional[uint64], + soft_close: Optional[uint64], + attendance_required: Optional[uint64], + pass_percentage: Optional[uint64], + self_destruct: Optional[uint64], + oracle_delay: Optional[uint64], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import create_update_proposal - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "vote_amount": vote_amount, - "proposal_timelock": proposal_timelock, - "soft_close_length": soft_close, - "attendance_required": attendance_required, - "pass_percentage": pass_percentage, - "self_destruct_length": self_destruct, - "oracle_spend_delay": oracle_delay, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(create_update_proposal(extra_params, wallet_rpc_port, fingerprint)) + 
asyncio.run( + create_update_proposal( + wallet_rpc_port, + fingerprint, + wallet_id, + fee, + vote_amount, + proposal_timelock, + soft_close, + attendance_required, + pass_percentage, + self_destruct, + oracle_delay, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) @dao_proposal.command("mint", short_help="Create a proposal to mint new DAO CATs", no_args_is_help=True) @@ -930,7 +883,7 @@ def dao_create_update_proposal_cmd( "-a", "--amount", help="The amount of new cats the proposal will mint (in mojos)", - type=int, + type=Uint64ParamType(), required=True, ) @click.option( @@ -949,45 +902,42 @@ def dao_create_update_proposal_cmd( required=False, default=None, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @tx_config_args def dao_create_mint_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, - amount: int, - to_address: int, + fee: uint64, + amount: uint64, + to_address: str, vote_amount: Optional[int], - fee: str, - min_coin_amount: Optional[str], - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], - amounts_to_exclude: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], + amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], ) -> None: from .dao_funcs import create_mint_proposal - extra_params = { - "wallet_id": wallet_id, - "fee": fee, - "amount": amount, - "cat_target_address": to_address, - "vote_amount": vote_amount, - "min_coin_amount": min_coin_amount, - "max_coin_amount": max_coin_amount, - "coins_to_exclude": coins_to_exclude, - "amounts_to_exclude": amounts_to_exclude, - "reuse_puzhash": reuse, - } - asyncio.run(create_mint_proposal(extra_params, wallet_rpc_port, fingerprint)) + asyncio.run( + create_mint_proposal( + wallet_rpc_port, + fingerprint, + wallet_id, + fee, + amount, + to_address, + vote_amount, + CMDTXConfigLoader( + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, + excluded_coin_ids=list(coins_to_exclude), + excluded_coin_amounts=list(amounts_to_exclude), + reuse_puzhash=reuse, + ), + ) + ) # ---------------------------------------------------------------------------------------- diff --git a/chia/cmds/dao_funcs.py b/chia/cmds/dao_funcs.py index 2a0c73e3291b..e81e38d32f2a 100644 --- a/chia/cmds/dao_funcs.py +++ b/chia/cmds/dao_funcs.py @@ -4,9 +4,10 @@ import json import time from decimal import Decimal -from typing import Any, Dict, Optional +from typing import Optional from chia.cmds.cmds_util import CMDTXConfigLoader, get_wallet_client, transaction_status_msg, transaction_submitted_msg +from chia.cmds.param_types import CliAmount from chia.cmds.units import units from chia.cmds.wallet_funcs import get_mojo_per_unit, get_wallet_type from chia.types.blockchain_format.sized_bytes import bytes32 @@ -17,20 +18,16 @@ from chia.wallet.util.wallet_types import WalletType -async def add_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - treasury_id = args["treasury_id"] - filter_amount = args["filter_amount"] - name = args["name"] - +async def add_dao_wallet( + wallet_rpc_port: Optional[int], fp: int, name: Optional[str], treasury_id: bytes32, filter_amount: uint64 +) -> None: print(f"Adding wallet 
for DAO: {treasury_id}") print("This may take awhile.") async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.create_new_dao_wallet( mode="existing", - tx_config=CMDTXConfigLoader.from_json_dict({"reuse_puzhash": True}).to_tx_config( - units["chia"], config, fingerprint - ), + tx_config=CMDTXConfigLoader(reuse_puzhash=True).to_tx_config(units["chia"], config, fingerprint), dao_rules=None, amount_of_cats=None, treasury_id=treasury_id, @@ -45,31 +42,36 @@ async def add_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int], f print(f"DAOCAT Wallet ID: {res['dao_cat_wallet_id']}") -async def create_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - proposal_minimum = uint64(int(Decimal(args["proposal_minimum_amount"]) * units["chia"])) - +async def create_dao_wallet( + wallet_rpc_port: Optional[int], + fp: int, + fee: uint64, + fee_for_cat: uint64, + name: Optional[str], + proposal_timelock: uint64, + soft_close: uint64, + attendance_required: uint64, + pass_percentage: uint64, + self_destruct: uint64, + oracle_delay: uint64, + proposal_minimum: uint64, + filter_amount: uint64, + cat_amount: CliAmount, + cli_tx_config: CMDTXConfigLoader, +) -> None: if proposal_minimum % 2 == 0: proposal_minimum = uint64(1 + proposal_minimum) print("Adding 1 mojo to proposal minimum amount") dao_rules = { - "proposal_timelock": args["proposal_timelock"], - "soft_close_length": args["soft_close_length"], - "attendance_required": args["attendance_required"], - "pass_percentage": args["pass_percentage"], - "self_destruct_length": args["self_destruct_length"], - "oracle_spend_delay": args["oracle_spend_delay"], + "proposal_timelock": proposal_timelock, + "soft_close_length": soft_close, + "attendance_required": attendance_required, + "pass_percentage": pass_percentage, + "self_destruct_length": self_destruct, + "oracle_spend_delay": oracle_delay, "proposal_minimum_amount": proposal_minimum, } - amount_of_cats = args["amount_of_cats"] - filter_amount = args["filter_amount"] - name = args["name"] - - fee = Decimal(args["fee"]) - final_fee: uint64 = uint64(int(fee * units["chia"])) - - fee_for_cat = Decimal(args["fee_for_cat"]) - final_fee_for_cat: uint64 = uint64(int(fee_for_cat * units["chia"])) async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): conf_coins, _, _ = await wallet_client.get_spendable_coins( @@ -80,21 +82,13 @@ async def create_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int] res = await wallet_client.create_new_dao_wallet( mode="new", dao_rules=dao_rules, - amount_of_cats=amount_of_cats, + amount_of_cats=cat_amount.convert_amount(units["mojo"]), treasury_id=None, filter_amount=filter_amount, name=name, - fee=final_fee, - fee_for_cat=final_fee_for_cat, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + fee_for_cat=fee_for_cat, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) print("Successfully created DAO Wallet") @@ -104,18 +98,14 @@ async def create_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int] print(f"DAOCAT Wallet ID: {res['dao_cat_wallet_id']}") -async def get_treasury_id(args: Dict[str, Any], 
wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - +async def get_treasury_id(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_treasury_id(wallet_id=wallet_id) treasury_id = res["treasury_id"] print(f"Treasury ID: {treasury_id}") -async def get_rules(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - +async def get_rules(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_rules(wallet_id=wallet_id) rules = res["rules"] @@ -123,11 +113,15 @@ async def get_rules(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: in print(f"{rule}: {val}") -async def add_funds_to_treasury(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - funding_wallet_id = args["funding_wallet_id"] - amount = Decimal(args["amount"]) - +async def add_funds_to_treasury( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + funding_wallet_id: int, + amount: CliAmount, + fee: uint64, + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: typ = await get_wallet_type(wallet_id=funding_wallet_id, wallet_client=wallet_client) @@ -136,24 +130,12 @@ async def add_funds_to_treasury(args: Dict[str, Any], wallet_rpc_port: Optional[ print(f"Wallet id: {wallet_id} not found.") return - fee = Decimal(args["fee"]) - final_fee: uint64 = uint64(int(fee * units["chia"])) - final_amount: uint64 = uint64(int(amount * mojo_per_unit)) - res = await wallet_client.dao_add_funds_to_treasury( wallet_id=wallet_id, funding_wallet_id=funding_wallet_id, - amount=final_amount, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + amount=amount.convert_amount(mojo_per_unit), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] @@ -169,9 +151,7 @@ async def add_funds_to_treasury(args: Dict[str, Any], wallet_rpc_port: Optional[ print(f"Transaction not yet submitted to nodes. 
TX ID: {tx_id}") # pragma: no cover -async def get_treasury_balance(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - +async def get_treasury_balance(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_treasury_balance(wallet_id=wallet_id) balances = res["balances"] @@ -189,10 +169,7 @@ async def get_treasury_balance(args: Dict[str, Any], wallet_rpc_port: Optional[i print(f"{asset_id}: {balance / cat_mojos}") -async def list_proposals(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - include_closed = args["include_closed"] - +async def list_proposals(wallet_rpc_port: Optional[int], fp: int, wallet_id: int, include_closed: bool) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_proposals(wallet_id=wallet_id, include_closed=include_closed) proposals = res["proposals"] @@ -210,10 +187,7 @@ async def list_proposals(args: Dict[str, Any], wallet_rpc_port: Optional[int], f print("############################") -async def show_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - proposal_id = args["proposal_id"] - +async def show_proposal(wallet_rpc_port: Optional[int], fp: int, wallet_id: int, proposal_id: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): res = await wallet_client.dao_parse_proposal(wallet_id, proposal_id) pd = res["proposal_dictionary"] @@ -286,30 +260,24 @@ async def show_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp print(f"Address: {address}") -async def vote_on_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - vote_amount = args["vote_amount"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) - proposal_id = args["proposal_id"] - is_yes_vote = args["is_yes_vote"] - +async def vote_on_proposal( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + proposal_id: str, + vote_amount: uint64, + is_yes_vote: bool, + fee: uint64, + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_vote_on_proposal( wallet_id=wallet_id, proposal_id=proposal_id, vote_amount=vote_amount, is_yes_vote=is_yes_vote, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] start = time.time() @@ -324,27 +292,22 @@ async def vote_on_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], print(f"Transaction not yet submitted to nodes. 
TX ID: {tx_id}") # pragma: no cover -async def close_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) - proposal_id = args["proposal_id"] - self_destruct = args["self_destruct"] +async def close_proposal( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + proposal_id: str, + self_destruct: bool, + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_close_proposal( wallet_id=wallet_id, proposal_id=proposal_id, - fee=final_fee, + fee=fee, self_destruct=self_destruct, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] start = time.time() @@ -359,26 +322,21 @@ async def close_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], f print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover -async def lockup_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - amount = args["amount"] - final_amount: uint64 = uint64(int(Decimal(amount) * units["cat"])) - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) +async def lockup_coins( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + amount: CliAmount, + fee: uint64, + cli_tx_config: CMDTXConfigLoader, +) -> None: + final_amount: uint64 = amount.convert_amount(units["cat"]) async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_send_to_lockup( wallet_id=wallet_id, amount=final_amount, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] start = time.time() @@ -393,23 +351,18 @@ async def lockup_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: print(f"Transaction not yet submitted to nodes. 
TX ID: {tx_id}") # pragma: no cover -async def release_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) +async def release_coins( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_free_coins_from_finished_proposals( wallet_id=wallet_id, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] start = time.time() @@ -423,24 +376,19 @@ async def release_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover -async def exit_lockup(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) +async def exit_lockup( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_exit_lockup( wallet_id=wallet_id, coins=[], - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) tx_id = res["tx_id"] start = time.time() @@ -454,14 +402,18 @@ async def exit_lockup(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: print(f"Transaction not yet submitted to nodes. 
TX ID: {tx_id}") # pragma: no cover -async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) - asset_id = args.get("asset_id") - address = args.get("to_address") - amount = args.get("amount") - additions_file = args.get("from_json") +async def create_spend_proposal( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + vote_amount: Optional[int], + address: Optional[str], + amount: Optional[str], + asset_id: Optional[str], + additions_file: Optional[str], + cli_tx_config: CMDTXConfigLoader, +) -> None: if additions_file is None and (address is None or amount is None): raise ValueError("Must include a json specification or an address / amount pair.") if additions_file: # pragma: no cover @@ -474,7 +426,6 @@ async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[ additions.append(addition) else: additions = None - vote_amount = args.get("vote_amount") async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(wallet_type=wallet_type) @@ -487,16 +438,8 @@ async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[ inner_address=address, asset_id=asset_id, vote_amount=vote_amount, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) if res["success"]: asset_id_name = asset_id if asset_id else "XCH" @@ -507,17 +450,20 @@ async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[ print("Failed to create proposal.") -async def create_update_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = Decimal(args["fee"]) - final_fee: uint64 = uint64(int(fee * units["chia"])) - proposal_timelock = args.get("proposal_timelock") - soft_close_length = args.get("soft_close_length") - attendance_required = args.get("attendance_required") - pass_percentage = args.get("pass_percentage") - self_destruct_length = args.get("self_destruct_length") - oracle_spend_delay = args.get("oracle_spend_delay") - vote_amount = args.get("vote_amount") +async def create_update_proposal( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + vote_amount: Optional[uint64], + proposal_timelock: Optional[uint64], + soft_close_length: Optional[uint64], + attendance_required: Optional[uint64], + pass_percentage: Optional[uint64], + self_destruct_length: Optional[uint64], + oracle_spend_delay: Optional[uint64], + cli_tx_config: CMDTXConfigLoader, +) -> None: new_dao_rules = { "proposal_timelock": proposal_timelock, "soft_close_length": soft_close_length, @@ -532,16 +478,8 @@ async def create_update_proposal(args: Dict[str, Any], wallet_rpc_port: Optional proposal_type="update", new_dao_rules=new_dao_rules, vote_amount=vote_amount, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - 
"max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) if res["success"]: print("Successfully created proposal.") @@ -550,13 +488,16 @@ async def create_update_proposal(args: Dict[str, Any], wallet_rpc_port: Optional print("Failed to create proposal.") -async def create_mint_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None: - wallet_id = args["wallet_id"] - fee = args["fee"] - final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"])) - cat_target_address = args["cat_target_address"] - amount = args["amount"] - vote_amount = args.get("vote_amount") +async def create_mint_proposal( + wallet_rpc_port: Optional[int], + fp: int, + wallet_id: int, + fee: uint64, + amount: uint64, + cat_target_address: str, + vote_amount: Optional[int], + cli_tx_config: CMDTXConfigLoader, +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_create_proposal( wallet_id=wallet_id, @@ -564,16 +505,8 @@ async def create_mint_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[i cat_target_address=cat_target_address, amount=amount, vote_amount=vote_amount, - fee=final_fee, - tx_config=CMDTXConfigLoader.from_json_dict( - { - "min_coin_amount": args["min_coin_amount"], - "max_coin_amount": args["max_coin_amount"], - "coins_to_exclude": args["coins_to_exclude"], - "amounts_to_exclude": args["amounts_to_exclude"], - "reuse_puzhash": args["reuse_puzhash"], - } - ).to_tx_config(units["chia"], config, fingerprint), + fee=fee, + tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), ) if res["success"]: print("Successfully created proposal.") diff --git a/chia/cmds/data.py b/chia/cmds/data.py index baa39345f7a1..c46c1b4c358b 100644 --- a/chia/cmds/data.py +++ b/chia/cmds/data.py @@ -3,16 +3,17 @@ import json import logging from pathlib import Path -from typing import Any, Callable, Coroutine, Dict, List, Optional, TypeVar, Union +from typing import Any, Callable, Coroutine, Dict, List, Optional, Sequence, TypeVar, Union import click from chia.cmds import options +from chia.cmds.param_types import Bytes32ParamType from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 _T = TypeVar("_T") - FC = TypeVar("FC", bound=Union[Callable[..., Any], click.Command]) logger = logging.getLogger(__name__) @@ -79,7 +80,7 @@ def create_data_store_id_option() -> Callable[[FC], FC]: "-store", "--id", help="The hexadecimal store id.", - type=str, + type=Bytes32ParamType(), required=True, ) @@ -106,14 +107,12 @@ def create_rpc_port_option() -> Callable[[FC], FC]: ) -def create_fee_option() -> Callable[[FC], FC]: +def create_root_hash_option() -> Callable[[FC], FC]: return click.option( - "-m", - "--fee", - help="Set the fees for the transaction, in XCH", - type=str, - default=None, - show_default=True, + "-r", + "--root_hash", + help="The hexadecimal root hash", + type=Bytes32ParamType(), required=False, ) @@ -139,12 +138,12 @@ def create_max_page_size_option() -> Callable[[FC], FC]: @data_cmd.command("create_data_store", help="Create a new data store") @create_rpc_port_option() -@create_fee_option() +@options.create_fee() @click.option("--verbose", is_flag=True, help="Enable verbose output.") 
@options.create_fingerprint() def create_data_store( data_rpc_port: int, - fee: Optional[str], + fee: Optional[uint64], verbose: bool, fingerprint: Optional[int], ) -> None: @@ -156,13 +155,13 @@ def create_data_store( @data_cmd.command("get_value", help="Get the value for a given key and store") @create_data_store_id_option() @create_key_option() -@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False) +@create_root_hash_option() @create_rpc_port_option() @options.create_fingerprint() def get_value( - id: str, + id: bytes32, key_string: str, - root_hash: Optional[str], + root_hash: Optional[bytes32], data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -175,14 +174,14 @@ def get_value( @create_data_store_id_option() @create_changelist_option() @create_rpc_port_option() -@create_fee_option() +@options.create_fee() @options.create_fingerprint() @click.option("--submit/--no-submit", default=True, help="Submit the result on chain") def update_data_store( - id: str, + id: bytes32, changelist_string: str, data_rpc_port: int, - fee: str, + fee: Optional[uint64], fingerprint: Optional[int], submit: bool, ) -> None: @@ -203,13 +202,13 @@ def update_data_store( @data_cmd.command("update_multiple_stores", help="Update multiple stores by providing the changelist operations") @create_store_updates_option() @create_rpc_port_option() -@create_fee_option() +@options.create_fee() @options.create_fingerprint() @click.option("--submit/--no-submit", default=True, help="Submit the result on chain") def update_multiple_stores( store_updates_string: str, data_rpc_port: int, - fee: str, + fee: uint64, fingerprint: Optional[int], submit: bool, ) -> None: @@ -229,12 +228,12 @@ def update_multiple_stores( @data_cmd.command("submit_pending_root", help="Submit on chain a locally stored batch") @create_data_store_id_option() @create_rpc_port_option() -@create_fee_option() +@options.create_fee() @options.create_fingerprint() def submit_pending_root( - id: str, + id: bytes32, data_rpc_port: int, - fee: str, + fee: uint64, fingerprint: Optional[int], ) -> None: from chia.cmds.data_funcs import submit_pending_root_cmd @@ -251,11 +250,11 @@ def submit_pending_root( @data_cmd.command("submit_all_pending_roots", help="Submit on chain all locally stored batches") @create_rpc_port_option() -@create_fee_option() +@options.create_fee() @options.create_fingerprint() def submit_all_pending_roots( data_rpc_port: int, - fee: str, + fee: uint64, fingerprint: Optional[int], ) -> None: from chia.cmds.data_funcs import submit_all_pending_roots_cmd @@ -271,14 +270,14 @@ def submit_all_pending_roots( @data_cmd.command("get_keys", help="Get all keys for a given store") @create_data_store_id_option() -@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False) +@create_root_hash_option() @create_rpc_port_option() @options.create_fingerprint() @create_page_option() @create_max_page_size_option() def get_keys( - id: str, - root_hash: Optional[str], + id: bytes32, + root_hash: Optional[bytes32], data_rpc_port: int, fingerprint: Optional[int], page: Optional[int], @@ -291,14 +290,14 @@ def get_keys( @data_cmd.command("get_keys_values", help="Get all keys and values for a given store") @create_data_store_id_option() -@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False) +@create_root_hash_option() @create_rpc_port_option() @options.create_fingerprint() @create_page_option() @create_max_page_size_option() def 
get_keys_values( - id: str, - root_hash: Optional[str], + id: bytes32, + root_hash: Optional[bytes32], data_rpc_port: int, fingerprint: Optional[int], page: Optional[int], @@ -318,7 +317,7 @@ def get_keys_values( @create_rpc_port_option() @options.create_fingerprint() def get_root( - id: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -340,7 +339,7 @@ def get_root( @create_rpc_port_option() @options.create_fingerprint() def subscribe( - id: str, + id: bytes32, urls: List[str], data_rpc_port: int, fingerprint: Optional[int], @@ -356,7 +355,7 @@ def subscribe( @create_rpc_port_option() @options.create_fingerprint() def remove_subscription( - id: str, + id: bytes32, urls: List[str], data_rpc_port: int, fingerprint: Optional[int], @@ -372,7 +371,7 @@ def remove_subscription( @options.create_fingerprint() @click.option("--retain", is_flag=True, help="Retain .dat files") def unsubscribe( - id: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], retain: bool, @@ -386,16 +385,16 @@ def unsubscribe( "get_kv_diff", help="Get the inserted and deleted keys and values between an initial and a final hash" ) @create_data_store_id_option() -@click.option("-hash_1", "--hash_1", help="Initial hash", type=str) -@click.option("-hash_2", "--hash_2", help="Final hash", type=str) +@click.option("-hash_1", "--hash_1", help="Initial hash", type=Bytes32ParamType(), required=True) +@click.option("-hash_2", "--hash_2", help="Final hash", type=Bytes32ParamType(), required=True) @create_rpc_port_option() @options.create_fingerprint() @create_page_option() @create_max_page_size_option() def get_kv_diff( - id: str, - hash_1: str, - hash_2: str, + id: bytes32, + hash_1: bytes32, + hash_2: bytes32, data_rpc_port: int, fingerprint: Optional[int], page: Optional[int], @@ -421,7 +420,7 @@ def get_kv_diff( @create_rpc_port_option() @options.create_fingerprint() def get_root_history( - id: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -450,7 +449,7 @@ def get_root_history( @create_rpc_port_option() @options.create_fingerprint() def add_missing_files( - ids: List[str], + ids: Sequence[bytes32], overwrite: bool, directory: Optional[str], data_rpc_port: int, @@ -461,7 +460,7 @@ def add_missing_files( run( add_missing_files_cmd( rpc_port=data_rpc_port, - ids=ids if ids else None, + ids=list(ids) if ids else None, overwrite=overwrite, foldername=None if directory is None else Path(directory), fingerprint=fingerprint, @@ -470,7 +469,7 @@ def add_missing_files( @data_cmd.command("add_mirror", help="Publish mirror urls on chain") -@click.option("-i", "--id", help="Store id", type=str, required=True) +@create_data_store_id_option() @click.option( "-a", "--amount", help="Amount to spend for this mirror, in mojos", type=int, default=0, show_default=True ) @@ -482,14 +481,14 @@ def add_missing_files( type=str, multiple=True, ) -@create_fee_option() +@options.create_fee() @create_rpc_port_option() @options.create_fingerprint() def add_mirror( - id: str, + id: bytes32, amount: int, urls: List[str], - fee: Optional[str], + fee: Optional[uint64], data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -508,13 +507,13 @@ def add_mirror( @data_cmd.command("delete_mirror", help="Delete an owned mirror by its coin id") -@click.option("-c", "--coin_id", help="Coin id", type=str, required=True) -@create_fee_option() +@click.option("-c", "--coin_id", help="Coin id", type=Bytes32ParamType(), required=True) +@options.create_fee() @create_rpc_port_option() 
@options.create_fingerprint() def delete_mirror( - coin_id: str, - fee: Optional[str], + coin_id: bytes32, + fee: Optional[uint64], data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -531,11 +530,11 @@ def delete_mirror( @data_cmd.command("get_mirrors", help="Get a list of all mirrors for a given store") -@click.option("-i", "--id", help="Store id", type=str, required=True) +@create_data_store_id_option() @create_rpc_port_option() @options.create_fingerprint() def get_mirrors( - id: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -589,7 +588,7 @@ def get_owned_stores( @create_rpc_port_option() @options.create_fingerprint() def get_sync_status( - id: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -617,25 +616,23 @@ def check_plugins( "clear_pending_roots", help="Clear pending roots that will not be published, associated data may not be recoverable", ) -@click.option("-i", "--id", "id_str", help="Store ID", type=str, required=True) +@create_data_store_id_option() @click.confirmation_option( prompt="Associated data may not be recoverable.\nAre you sure you want to remove the pending roots?", ) @create_rpc_port_option() @options.create_fingerprint() def clear_pending_roots( - id_str: str, + id: bytes32, data_rpc_port: int, fingerprint: Optional[int], ) -> None: from chia.cmds.data_funcs import clear_pending_roots - store_id = bytes32.from_hexstr(id_str) - run( clear_pending_roots( rpc_port=data_rpc_port, - store_id=store_id, + store_id=id, fingerprint=fingerprint, ) ) @@ -670,16 +667,14 @@ def wallet_log_in( @create_key_option(multiple=True) @options.create_fingerprint() def get_proof( - id: str, + id: bytes32, key_strings: List[str], data_rpc_port: int, fingerprint: Optional[int], ) -> None: from chia.cmds.data_funcs import get_proof_cmd - store_id = bytes32.from_hexstr(id) - - run(get_proof_cmd(rpc_port=data_rpc_port, store_id=store_id, fingerprint=fingerprint, key_strings=key_strings)) + run(get_proof_cmd(rpc_port=data_rpc_port, store_id=id, fingerprint=fingerprint, key_strings=key_strings)) @data_cmd.command( diff --git a/chia/cmds/data_funcs.py b/chia/cmds/data_funcs.py index 7de825ec323c..c3d16e2b21db 100644 --- a/chia/cmds/data_funcs.py +++ b/chia/cmds/data_funcs.py @@ -2,12 +2,10 @@ import contextlib import json -from decimal import Decimal from pathlib import Path from typing import Any, AsyncIterator, Dict, List, Optional, Tuple from chia.cmds.cmds_util import get_any_service_client -from chia.cmds.units import units from chia.rpc.data_layer_rpc_client import DataLayerRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.byte_types import hexstr_to_bytes @@ -39,49 +37,43 @@ async def wallet_log_in_cmd( async def create_data_store_cmd( rpc_port: Optional[int], - fee: Optional[str], + fee: Optional[uint64], verbose: bool, fingerprint: Optional[int], ) -> None: - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.create_data_store(fee=final_fee, verbose=verbose) + res = await client.create_data_store(fee=fee, verbose=verbose) print(json.dumps(res, indent=2, sort_keys=True)) async def get_value_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, key: str, - root_hash: Optional[str], + root_hash: Optional[bytes32], fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) key_bytes = hexstr_to_bytes(key) 
- root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.get_value(store_id=store_id_bytes, key=key_bytes, root_hash=root_hash_bytes) + res = await client.get_value(store_id=store_id, key=key_bytes, root_hash=root_hash) print(json.dumps(res, indent=2, sort_keys=True)) async def update_data_store_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, changelist: List[Dict[str, str]], - fee: Optional[str], + fee: Optional[uint64], fingerprint: Optional[int], submit_on_chain: bool, root_path: Optional[Path] = None, ) -> Dict[str, Any]: - store_id_bytes = bytes32.from_hexstr(store_id) - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) res = dict() - async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.update_data_store( - store_id=store_id_bytes, + store_id=store_id, changelist=changelist, - fee=final_fee, + fee=fee, submit_on_chain=submit_on_chain, ) print(json.dumps(res, indent=2, sort_keys=True)) @@ -92,18 +84,17 @@ async def update_data_store_cmd( async def update_multiple_stores_cmd( rpc_port: Optional[int], store_updates: List[Dict[str, str]], - fee: Optional[str], + fee: Optional[uint64], fingerprint: Optional[int], submit_on_chain: bool, root_path: Optional[Path] = None, ) -> Dict[str, Any]: - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.update_multiple_stores( store_updates=store_updates, - fee=final_fee, + fee=fee, submit_on_chain=submit_on_chain, ) print(json.dumps(res, indent=2, sort_keys=True)) @@ -113,18 +104,16 @@ async def update_multiple_stores_cmd( async def submit_pending_root_cmd( rpc_port: Optional[int], - store_id: str, - fee: Optional[str], + store_id: bytes32, + fee: Optional[uint64], fingerprint: Optional[int], root_path: Optional[Path] = None, ) -> Dict[str, Any]: - store_id_bytes = bytes32.from_hexstr(store_id) - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.submit_pending_root( - store_id=store_id_bytes, - fee=final_fee, + store_id=store_id, + fee=fee, ) print(json.dumps(res, indent=2, sort_keys=True)) @@ -133,14 +122,13 @@ async def submit_pending_root_cmd( async def submit_all_pending_roots_cmd( rpc_port: Optional[int], - fee: Optional[str], + fee: Optional[uint64], fingerprint: Optional[int], root_path: Optional[Path] = None, ) -> Dict[str, Any]: - final_fee = None if fee is None else uint64(Decimal(fee) * units["chia"]) res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): - res = await client.submit_all_pending_roots(fee=final_fee) + res = await client.submit_all_pending_roots(fee=fee) print(json.dumps(res, indent=2, sort_keys=True)) return res @@ -148,20 +136,16 @@ async def submit_all_pending_roots_cmd( async def get_keys_cmd( rpc_port: Optional[int], - store_id: str, - root_hash: Optional[str], + store_id: bytes32, + root_hash: Optional[bytes32], fingerprint: Optional[int], page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, ) -> Dict[str, Any]: - store_id_bytes = bytes32.from_hexstr(store_id) - 
root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash) res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): - res = await client.get_keys( - store_id=store_id_bytes, root_hash=root_hash_bytes, page=page, max_page_size=max_page_size - ) + res = await client.get_keys(store_id=store_id, root_hash=root_hash, page=page, max_page_size=max_page_size) print(json.dumps(res, indent=2, sort_keys=True)) return res @@ -169,19 +153,17 @@ async def get_keys_cmd( async def get_keys_values_cmd( rpc_port: Optional[int], - store_id: str, - root_hash: Optional[str], + store_id: bytes32, + root_hash: Optional[bytes32], fingerprint: Optional[int], page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, ) -> Dict[str, Any]: - store_id_bytes = bytes32.from_hexstr(store_id) - root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash) res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.get_keys_values( - store_id=store_id_bytes, root_hash=root_hash_bytes, page=page, max_page_size=max_page_size + store_id=store_id, root_hash=root_hash, page=page, max_page_size=max_page_size ) print(json.dumps(res, indent=2, sort_keys=True)) @@ -190,69 +172,61 @@ async def get_keys_values_cmd( async def get_root_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.get_root(store_id=store_id_bytes) + res = await client.get_root(store_id=store_id) print(json.dumps(res, indent=2, sort_keys=True)) async def subscribe_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, urls: List[str], fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.subscribe(store_id=store_id_bytes, urls=urls) + res = await client.subscribe(store_id=store_id, urls=urls) print(json.dumps(res, indent=2, sort_keys=True)) async def unsubscribe_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, fingerprint: Optional[int], retain: bool, ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.unsubscribe(store_id=store_id_bytes, retain=retain) + res = await client.unsubscribe(store_id=store_id, retain=retain) print(json.dumps(res, indent=2, sort_keys=True)) async def remove_subscriptions_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, urls: List[str], fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.remove_subscriptions(store_id=store_id_bytes, urls=urls) + res = await client.remove_subscriptions(store_id=store_id, urls=urls) print(json.dumps(res, indent=2, sort_keys=True)) async def get_kv_diff_cmd( rpc_port: Optional[int], - store_id: str, - hash_1: str, - hash_2: str, + store_id: bytes32, + hash_1: bytes32, + hash_2: bytes32, fingerprint: Optional[int], page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, ) -> Dict[str, Any]: - store_id_bytes = bytes32.from_hexstr(store_id) - hash_1_bytes 
= bytes32.from_hexstr(hash_1) - hash_2_bytes = bytes32.from_hexstr(hash_2) res = dict() - async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.get_kv_diff( - store_id=store_id_bytes, hash_1=hash_1_bytes, hash_2=hash_2_bytes, page=page, max_page_size=max_page_size + store_id=store_id, hash_1=hash_1, hash_2=hash_2, page=page, max_page_size=max_page_size ) print(json.dumps(res, indent=2, sort_keys=True)) @@ -261,25 +235,24 @@ async def get_kv_diff_cmd( async def get_root_history_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.get_root_history(store_id=store_id_bytes) + res = await client.get_root_history(store_id=store_id) print(json.dumps(res, indent=2, sort_keys=True)) async def add_missing_files_cmd( rpc_port: Optional[int], - ids: Optional[List[str]], + ids: Optional[List[bytes32]], overwrite: bool, foldername: Optional[Path], fingerprint: Optional[int], ) -> None: async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): res = await client.add_missing_files( - store_ids=(None if ids is None else [bytes32.from_hexstr(id) for id in ids]), + store_ids=ids, overwrite=overwrite, foldername=foldername, ) @@ -288,48 +261,43 @@ async def add_missing_files_cmd( async def add_mirror_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, urls: List[str], amount: int, - fee: Optional[str], + fee: Optional[uint64], fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): res = await client.add_mirror( - store_id=store_id_bytes, + store_id=store_id, urls=urls, amount=amount, - fee=final_fee, + fee=fee, ) print(json.dumps(res, indent=2, sort_keys=True)) async def delete_mirror_cmd( rpc_port: Optional[int], - coin_id: str, - fee: Optional[str], + coin_id: bytes32, + fee: Optional[uint64], fingerprint: Optional[int], ) -> None: - coin_id_bytes = bytes32.from_hexstr(coin_id) - final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"])) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): res = await client.delete_mirror( - coin_id=coin_id_bytes, - fee=final_fee, + coin_id=coin_id, + fee=fee, ) print(json.dumps(res, indent=2, sort_keys=True)) async def get_mirrors_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.get_mirrors(store_id=store_id_bytes) + res = await client.get_mirrors(store_id=store_id) print(json.dumps(res, indent=2, sort_keys=True)) @@ -353,12 +321,11 @@ async def get_owned_stores_cmd( async def get_sync_status_cmd( rpc_port: Optional[int], - store_id: str, + store_id: bytes32, fingerprint: Optional[int], ) -> None: - store_id_bytes = bytes32.from_hexstr(store_id) async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): - res = await client.get_sync_status(store_id=store_id_bytes) + res = await client.get_sync_status(store_id=store_id) print(json.dumps(res, indent=2, sort_keys=True)) diff --git 
a/chia/cmds/options.py b/chia/cmds/options.py index 6a0147fe5956..5366426e71ff 100644 --- a/chia/cmds/options.py +++ b/chia/cmds/options.py @@ -4,6 +4,8 @@ import click +from chia.cmds.param_types import TransactionFeeParamType + FC = TypeVar("FC", bound=Union[Callable[..., Any], click.Command]) @@ -16,3 +18,15 @@ def create_fingerprint(required: bool = False) -> Callable[[FC], FC]: # TODO: should be uint32 type=int, ) + + +def create_fee(message: str = "Set the fees for the transaction, in XCH", required: bool = True) -> Callable[[FC], FC]: + return click.option( + "-m", + "--fee", + help=message, + type=TransactionFeeParamType(), + default="0", + show_default=True, + required=required, + ) diff --git a/chia/cmds/param_types.py b/chia/cmds/param_types.py new file mode 100644 index 000000000000..bd6d561f6365 --- /dev/null +++ b/chia/cmds/param_types.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +from dataclasses import dataclass +from decimal import Decimal, InvalidOperation +from typing import Any, Callable, Optional, Union + +import click + +from chia.cmds.units import units +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.bech32m import bech32_decode, decode_puzzle_hash +from chia.util.config import load_config, selected_network_address_prefix +from chia.util.default_root import DEFAULT_ROOT_PATH +from chia.util.ints import uint64 +from chia.wallet.util.address_type import AddressType + +one_decimal_mojo = Decimal("1e-12") + + +def validate_uint64( + value: str, + fail_func: Callable[[str, Optional[click.Parameter], Optional[click.Context]], None], + param: Optional[click.Parameter], + ctx: Optional[click.Context], +) -> uint64: + try: + d_value = Decimal(value) + except InvalidOperation as e: + fail_func(f"Value must be a valid number: {e}", param, ctx) + if d_value.is_signed(): + fail_func("Value must be a positive integer", param, ctx) + if d_value != d_value.to_integral(): + fail_func("Value must be an integer", param, ctx) + try: + u_value = uint64(value) + except ValueError: + fail_func("Value must be a valid uint64 number", param, ctx) + return u_value + + +def validate_decimal_xch( + value: str, + fail_func: Callable[[str, Optional[click.Parameter], Optional[click.Context]], None], + param: Optional[click.Parameter], + ctx: Optional[click.Context], +) -> Decimal: + try: + d_value = Decimal(value) + except InvalidOperation: + fail_func("Value must be decimal dotted value in XCH (e.g. 0.00005)", param, ctx) + if d_value.is_signed(): + fail_func("Value can not be negative", param, ctx) + if d_value % one_decimal_mojo != Decimal(0): # if there is a remainder, it contains a value smaller than one mojo + fail_func("Invalid amount of mojos, Partial mojos (Fractions of a mojo).", param, ctx) + return d_value + + +class TransactionFeeParamType(click.ParamType): + """ + A Click parameter type for transaction fees, which can be specified in XCH or mojos. + """ + + name: str = "XCH" # type name for cli, TODO: Change once the mojo flag is implemented + value_limit: Decimal = Decimal("0.5") + + def convert(self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]) -> uint64: + # suggested by click, but we are not using it to avoid possible misinterpretation of units. 
+ # if isinstance(value, uint64): + # return value + if not isinstance(value, str): + self.fail("Invalid Type, fee must be string.", param, ctx) + mojos = False # TODO: Add unit logic + if mojos: + return validate_uint64(value, self.fail, param, ctx) + d_value = validate_decimal_xch(value, self.fail, param, ctx) + if not self.value_limit.is_zero() and d_value > self.value_limit: + self.fail(f"Fee must be in the range 0 to {self.value_limit}", param, ctx) + try: + return uint64(d_value * units["chia"]) + except ValueError: + self.fail("Fee must be a valid uint64 number", param, ctx) + + +@dataclass(frozen=True) +class CliAmount: + """ + A dataclass for TX / wallet amounts for both XCH and CAT, and of course mojos. + """ + + mojos: bool + amount: Union[uint64, Decimal, None] # uint64 if mojos, Decimal if not, None if default value is none + + def convert_amount_with_default( + self, mojo_per_unit: int, default_value: Optional[uint64] = uint64(0) + ) -> Optional[uint64]: + if self.amount is None: # if the value is set to none, return the default value + return default_value + return self.convert_amount(mojo_per_unit) + + def convert_amount(self, mojo_per_unit: int) -> uint64: + if self.mojos: + if not isinstance(self.amount, uint64): + raise ValueError("Amount must be a uint64 if mojos flag is set.") + return self.amount + if not isinstance(self.amount, Decimal): + raise ValueError("Amount must be a Decimal if mojos flag is not set.") + return uint64(self.amount * mojo_per_unit) + + +class AmountParamType(click.ParamType): + """ + A Click parameter type for TX / wallet amounts for both XCH and CAT, and of course mojos. + """ + + name: str = "XCH" # type name for cli, TODO: Change once the mojo flag is implemented + + def convert(self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]) -> CliAmount: + # suggested by click, but being left in as mojos flag makes default misrepresentation less likely. + if isinstance(value, CliAmount): + return value + if not isinstance(value, str): + self.fail("Invalid Type, amount must be string or CliAmount.", param, ctx) + mojos = False # TODO: Add unit logic + if mojos: + m_value = validate_uint64(value, self.fail, param, ctx) + return CliAmount(mojos=True, amount=m_value) + d_value = validate_decimal_xch(value, self.fail, param, ctx) + return CliAmount(mojos=False, amount=d_value) + + +@dataclass(frozen=True) +class CliAddress: + """ + A dataclass for the cli, with the address type and puzzle hash. + """ + + puzzle_hash: bytes32 + original_address: str + address_type: AddressType + + def validate_address_type(self, address_type: AddressType) -> str: + if self.address_type is not address_type: + raise ValueError(f"Address must be of type {address_type}") + return self.original_address + + def validate_address_type_get_ph(self, address_type: AddressType) -> bytes32: + if self.address_type is not address_type: + raise ValueError(f"Address must be of type {address_type}") + return self.puzzle_hash + + +class AddressParamType(click.ParamType): + """ + A Click parameter type for bech32m encoded addresses, it gives a class with the address type and puzzle hash. + """ + + name: str = "Address" # type name for cli + + def convert(self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]) -> CliAddress: + # suggested by click, but not really used so removed to make unexpected types more obvious. 
+ # if isinstance(value, CliAddress): + # return value + if not isinstance(value, str): + self.fail("Invalid Type, address must be string.", param, ctx) + try: + hrp, b32data = bech32_decode(value) + if hrp in ["xch", "txch"]: # I hate having to load the config here + addr_type: AddressType = AddressType.XCH + expected_prefix = ctx.obj.get("expected_prefix") if ctx else None # attempt to get cached prefix + if expected_prefix is None: + root_path = ctx.obj["root_path"] if ctx is not None else DEFAULT_ROOT_PATH + config = load_config(root_path, "config.yaml") + expected_prefix = selected_network_address_prefix(config) + + if ctx is not None: + ctx.obj["expected_prefix"] = expected_prefix # cache prefix + # now that we have the expected prefix, we can validate the address is for the right network + if hrp != expected_prefix: + self.fail(f"Unexpected Address Prefix: {hrp}, are you sure its for the right network?", param, ctx) + else: # all other address prefixes (Not xch / txch) + addr_type = AddressType(hrp) + return CliAddress(puzzle_hash=decode_puzzle_hash(value), address_type=addr_type, original_address=value) + except ValueError: + self.fail("Address must be a valid bech32m address", param, ctx) + + +class Bytes32ParamType(click.ParamType): + """ + A Click parameter type for bytes32 hex strings, with or without the 0x prefix. + """ + + name: str = "HexString" # type name for cli + + def convert(self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]) -> bytes32: + # suggested by click but deemed not necessary due to unnecessary complexity. + # if isinstance(value, bytes32): + # return value + if not isinstance(value, str): + self.fail("Invalid Type, value must be string.", param, ctx) + try: + return bytes32.from_hexstr(value) + except ValueError: + self.fail("Value must be a valid bytes32 hex string like a coin id or puzzle hash", param, ctx) + + +class Uint64ParamType(click.ParamType): + """ + A Click parameter type for Uint64 integers. + """ + + name: str = uint64.__name__ # type name for cli + + def convert(self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]) -> uint64: + if isinstance(value, uint64): # required by click + return value + if not isinstance(value, str): + self.fail("Invalid Type, value must be string or uint64.", param, ctx) + return validate_uint64(value, self.fail, param, ctx) + + +cli_amount_none = CliAmount(mojos=False, amount=None) diff --git a/chia/cmds/plotnft.py b/chia/cmds/plotnft.py index 888c2d05ba74..53b59f754652 100644 --- a/chia/cmds/plotnft.py +++ b/chia/cmds/plotnft.py @@ -1,23 +1,13 @@ from __future__ import annotations -from decimal import Decimal from typing import Optional import click from chia.cmds import options - -MAX_CMDLINE_FEE = Decimal(0.5) - - -def validate_fee(ctx: click.Context, param: click.Parameter, value: str) -> str: - try: - fee = Decimal(value) - except ValueError: - raise click.BadParameter("Fee must be decimal dotted value in XCH (e.g. 
0.00005)") - if fee < 0 or fee > MAX_CMDLINE_FEE: - raise click.BadParameter(f"Fee must be in the range 0 to {MAX_CMDLINE_FEE}") - return value +from chia.cmds.param_types import AddressParamType, Bytes32ParamType, CliAddress +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 @click.group("plotnft", help="Manage your plot NFTs") @@ -44,8 +34,8 @@ def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: @plotnft_cmd.command("get_login_link", help="Create a login link for a pool. To get the launcher id, use plotnft show.") -@click.option("-l", "--launcher_id", help="Launcher ID of the plotnft", type=str, required=True) -def get_login_link_cmd(launcher_id: str) -> None: +@click.option("-l", "--launcher_id", help="Launcher ID of the plotnft", type=Bytes32ParamType(), required=True) +def get_login_link_cmd(launcher_id: bytes32) -> None: import asyncio from .plotnft_funcs import get_login_link @@ -58,15 +48,8 @@ def get_login_link_cmd(launcher_id: str) -> None: @options.create_fingerprint() @click.option("-u", "--pool_url", help="HTTPS host:port of the pool to join", type=str, required=False) @click.option("-s", "--state", help="Initial state of Plot NFT: local or pool", type=str, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH. Fee is used TWICE: once to create the singleton, once for init.", - type=str, - default="0", - show_default=True, - required=True, - callback=validate_fee, +@options.create_fee( + "Set the fees per transaction, in XCH. Fee is used TWICE: once to create the singleton, once for init." ) @click.option( "-wp", @@ -80,7 +63,7 @@ def create_cmd( fingerprint: int, pool_url: str, state: str, - fee: str, + fee: uint64, dont_prompt: bool, ) -> None: import asyncio @@ -95,9 +78,7 @@ def create_cmd( return valid_initial_states = {"pool": "FARMING_TO_POOL", "local": "SELF_POOLING"} asyncio.run( - create( - wallet_rpc_port, fingerprint, pool_url, valid_initial_states[state], Decimal(fee), prompt=not dont_prompt - ) + create(wallet_rpc_port, fingerprint, pool_url, valid_initial_states[state], fee, prompt=not dont_prompt) ) @@ -106,16 +87,7 @@ def create_cmd( @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) @options.create_fingerprint() @click.option("-u", "--pool_url", help="HTTPS host:port of the pool to join", type=str, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH. Fee is used TWICE: once to leave pool, once to join.", - type=str, - default="0", - show_default=True, - required=True, - callback=validate_fee, -) +@options.create_fee("Set the fees per transaction, in XCH. 
Fee is used TWICE: once to leave pool, once to join.") @click.option( "-wp", "--wallet-rpc-port", @@ -124,7 +96,7 @@ def create_cmd( default=None, ) def join_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int, pool_url: str, dont_prompt: bool + wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: uint64, pool_url: str, dont_prompt: bool ) -> None: import asyncio @@ -135,7 +107,7 @@ def join_cmd( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, pool_url=pool_url, - fee=Decimal(fee), + fee=fee, wallet_id=id, prompt=not dont_prompt, ) @@ -146,16 +118,7 @@ def join_cmd( @click.option("-y", "--yes", "dont_prompt", help="No prompts", is_flag=True) @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) @options.create_fingerprint() -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH. Fee is charged TWICE.", - type=str, - default="0", - show_default=True, - required=True, - callback=validate_fee, -) +@options.create_fee("Set the fees per transaction, in XCH. Fee is charged TWICE.") @click.option( "-wp", "--wallet-rpc-port", @@ -163,7 +126,7 @@ def join_cmd( type=int, default=None, ) -def self_pool_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int, dont_prompt: bool) -> None: +def self_pool_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: uint64, dont_prompt: bool) -> None: import asyncio from .plotnft_funcs import self_pool @@ -172,7 +135,7 @@ def self_pool_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee self_pool( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, - fee=Decimal(fee), + fee=fee, wallet_id=id, prompt=not dont_prompt, ) @@ -200,16 +163,7 @@ def inspect(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: @plotnft_cmd.command("claim", help="Claim rewards from a plot NFT") @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) @options.create_fingerprint() -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - required=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "-wp", "--wallet-rpc-port", @@ -217,7 +171,7 @@ def inspect(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: type=int, default=None, ) -def claim(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int) -> None: +def claim(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: uint64) -> None: import asyncio from .plotnft_funcs import claim_cmd @@ -226,7 +180,7 @@ def claim(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int) - claim_cmd( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, - fee=Decimal(fee), + fee=fee, wallet_id=id, ) ) @@ -237,8 +191,8 @@ def claim(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int) - help="Change the payout instructions for a pool. 
To get the launcher id, use plotnft show.", ) @click.option("-l", "--launcher_id", help="Launcher ID of the plotnft", type=str, required=True) -@click.option("-a", "--address", help="New address for payout instructions", type=str, required=True) -def change_payout_instructions_cmd(launcher_id: str, address: str) -> None: +@click.option("-a", "--address", help="New address for payout instructions", type=AddressParamType(), required=True) +def change_payout_instructions_cmd(launcher_id: str, address: CliAddress) -> None: import asyncio from .plotnft_funcs import change_payout_instructions diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py index cf4208da108f..697035927ffa 100644 --- a/chia/cmds/plotnft_funcs.py +++ b/chia/cmds/plotnft_funcs.py @@ -5,7 +5,6 @@ import json import time from dataclasses import replace -from decimal import Decimal from pprint import pprint from typing import Any, Awaitable, Callable, Dict, List, Optional @@ -18,7 +17,7 @@ transaction_status_msg, transaction_submitted_msg, ) -from chia.cmds.units import units +from chia.cmds.param_types import CliAddress from chia.cmds.wallet_funcs import print_balance, wallet_coin_unit from chia.pools.pool_config import PoolWalletConfig, load_pool_config, update_pool_config from chia.pools.pool_wallet_info import PoolSingletonState, PoolWalletInfo @@ -28,13 +27,14 @@ from chia.server.server import ssl_context_for_root from chia.ssl.create_ssl import get_mozilla_ca_crt from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash +from chia.util.bech32m import encode_puzzle_hash from chia.util.byte_types import hexstr_to_bytes from chia.util.config import load_config from chia.util.default_root import DEFAULT_ROOT_PATH from chia.util.errors import CliRpcConnectionError from chia.util.ints import uint32, uint64 from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.address_type import AddressType from chia.wallet.util.wallet_types import WalletType @@ -62,10 +62,9 @@ async def create_pool_args(pool_url: str) -> Dict[str, Any]: async def create( - wallet_rpc_port: Optional[int], fingerprint: int, pool_url: Optional[str], state: str, fee: Decimal, *, prompt: bool + wallet_rpc_port: Optional[int], fingerprint: int, pool_url: Optional[str], state: str, fee: uint64, *, prompt: bool ) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, _): - fee_mojos = uint64(int(fee * units["chia"])) target_puzzle_hash: Optional[bytes32] # Could use initial_pool_state_from_dict to simplify if state == "SELF_POOLING": @@ -99,7 +98,7 @@ async def create( "localhost:5000", "new", state, - fee_mojos, + fee, ) start = time.time() while time.time() - start < 10: @@ -235,8 +234,7 @@ async def show(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id_pass await pprint_all_pool_wallet_state(wallet_client, summaries_response, address_prefix, pool_state_dict) -async def get_login_link(launcher_id_str: str) -> None: - launcher_id: bytes32 = bytes32.from_hexstr(launcher_id_str) +async def get_login_link(launcher_id: bytes32) -> None: async with get_any_service_client(FarmerRpcClient) as (farmer_client, _): login_link: Optional[str] = await farmer_client.get_pool_login_link(launcher_id) if login_link is None: @@ -276,13 +274,12 @@ async def join_pool( wallet_rpc_port: Optional[int], fingerprint: int, pool_url: str, - fee: Decimal, + fee: uint64, wallet_id: int, prompt: bool, ) -> None: async with 
get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): enforce_https = config["full_node"]["selected_network"] == "mainnet" - fee_mojos = uint64(int(fee * units["chia"])) if enforce_https and not pool_url.startswith("https://"): print(f"Pool URLs must be HTTPS on mainnet {pool_url}. Aborting.") @@ -316,19 +313,18 @@ async def join_pool( hexstr_to_bytes(json_dict["target_puzzle_hash"]), pool_url, json_dict["relative_lock_height"], - fee_mojos, + fee, ) await submit_tx_with_confirmation(msg, prompt, func, wallet_client, fingerprint, wallet_id) async def self_pool( - *, wallet_rpc_port: Optional[int], fingerprint: int, fee: Decimal, wallet_id: int, prompt: bool + *, wallet_rpc_port: Optional[int], fingerprint: int, fee: uint64, wallet_id: int, prompt: bool ) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, _): - fee_mojos = uint64(int(fee * units["chia"])) msg = f"Will start self-farming with Plot NFT on wallet id {wallet_id} fingerprint {fingerprint}." - func = functools.partial(wallet_client.pw_self_pool, wallet_id, fee_mojos) + func = functools.partial(wallet_client.pw_self_pool, wallet_id, fee) await submit_tx_with_confirmation(msg, prompt, func, wallet_client, fingerprint, wallet_id) @@ -345,26 +341,21 @@ async def inspect_cmd(wallet_rpc_port: Optional[int], fingerprint: int, wallet_i ) -async def claim_cmd(*, wallet_rpc_port: Optional[int], fingerprint: int, fee: Decimal, wallet_id: int) -> None: +async def claim_cmd(*, wallet_rpc_port: Optional[int], fingerprint: int, fee: uint64, wallet_id: int) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, _): - fee_mojos = uint64(int(fee * units["chia"])) msg = f"\nWill claim rewards for wallet ID: {wallet_id}." 
func = functools.partial( wallet_client.pw_absorb_rewards, wallet_id, - fee_mojos, + fee, ) await submit_tx_with_confirmation(msg, False, func, wallet_client, fingerprint, wallet_id) -async def change_payout_instructions(launcher_id: str, address: str) -> None: +async def change_payout_instructions(launcher_id: str, address: CliAddress) -> None: new_pool_configs: List[PoolWalletConfig] = [] id_found = False - try: - puzzle_hash = decode_puzzle_hash(address) - except ValueError: - print(f"Invalid Address: {address}") - return + puzzle_hash = address.validate_address_type_get_ph(AddressType.XCH) old_configs: List[PoolWalletConfig] = load_pool_config(DEFAULT_ROOT_PATH) for pool_config in old_configs: diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index 968b567cdd32..f9986a5254e4 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -2,7 +2,6 @@ import asyncio import pathlib -from decimal import Decimal from typing import List, Optional, Sequence import click @@ -10,7 +9,16 @@ from chia.cmds import options from chia.cmds.check_wallet_db import help_text as check_help_text from chia.cmds.coins import coins_cmd -from chia.cmds.plotnft import validate_fee +from chia.cmds.param_types import ( + AddressParamType, + AmountParamType, + Bytes32ParamType, + CliAddress, + CliAmount, + cli_amount_none, +) +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint32, uint64 from chia.wallet.transaction_sorting import SortKey from chia.wallet.util.address_type import AddressType from chia.wallet.util.wallet_types import WalletType @@ -142,18 +150,11 @@ def get_transactions_cmd( ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True) -@click.option("-a", "--amount", help="How much chia to send, in XCH", type=str, required=True) +@click.option("-a", "--amount", help="How much chia to send, in XCH", type=AmountParamType(), required=True) @click.option("-e", "--memo", help="Additional memo for the transaction", type=str, default=None) -@click.option( - "-m", - "--fee", - help="Set the fees for the transaction, in XCH", - type=str, - default="0", - show_default=True, - required=True, -) -@click.option("-t", "--address", help="Address to send the XCH", type=str, required=True) +@options.create_fee() +# TODO: Fix RPC as this should take a puzzle_hash not an address. 
+@click.option("-t", "--address", help="Address to send the XCH", type=AddressParamType(), required=True) @click.option( "-o", "--override", help="Submits transaction without checking for unusual values", is_flag=True, default=False ) @@ -161,22 +162,23 @@ def get_transactions_cmd( "-ma", "--min-coin-amount", help="Ignore coins worth less then this much XCH or CAT units", - type=str, + type=AmountParamType(), required=False, - default="0", + default=cli_amount_none, ) @click.option( "-l", "--max-coin-amount", help="Ignore coins worth more then this much XCH or CAT units", - type=str, + type=AmountParamType(), required=False, - default=None, + default=cli_amount_none, ) @click.option( "--exclude-coin", "coins_to_exclude", multiple=True, + type=Bytes32ParamType(), help="Exclude this coin from being spent.", ) @click.option( @@ -196,14 +198,14 @@ def send_cmd( wallet_rpc_port: Optional[int], fingerprint: int, id: int, - amount: str, + amount: CliAmount, memo: Optional[str], - fee: str, - address: str, + fee: uint64, + address: CliAddress, override: bool, - min_coin_amount: str, - max_coin_amount: Optional[str], - coins_to_exclude: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + coins_to_exclude: Sequence[bytes32], reuse: bool, clawback_time: int, ) -> None: # pragma: no cover @@ -214,9 +216,9 @@ def send_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, - amount=Decimal(amount), + amount=amount, memo=memo, - fee=Decimal(fee), + fee=fee, address=address, override=override, min_coin_amount=min_coin_amount, @@ -299,9 +301,7 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int, n default="", required=True, ) -@click.option( - "-m", "--fee", help="A fee to add to the offer when it gets taken, in XCH", default="0", show_default=True -) +@options.create_fee("A fee to add to the offer when it gets taken, in XCH") @click.option( "--force", help="Force to push the spend bundle even it may be a double spend", @@ -309,14 +309,12 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int, n default=False, ) def clawback( - wallet_rpc_port: Optional[int], id: int, fingerprint: int, tx_ids: str, fee: str, force: bool + wallet_rpc_port: Optional[int], id: int, fingerprint: int, tx_ids: str, fee: uint64, force: bool ) -> None: # pragma: no cover from .wallet_funcs import spend_clawback asyncio.run( - spend_clawback( - wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=Decimal(fee), tx_ids_str=tx_ids, force=force - ) + spend_clawback(wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=fee, tx_ids_str=tx_ids, force=force) ) @@ -360,9 +358,12 @@ def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) - default=None, ) @options.create_fingerprint() -@click.option("-a", "--address", help="The address you want to use for signing", type=str, required=True) +# TODO: Change RPC's to use the puzzle hash instead of address +@click.option("-a", "--address", help="The address you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want sign", type=str, required=True) -def address_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, address: str, hex_message: str) -> None: +def address_sign_message( + wallet_rpc_port: Optional[int], fingerprint: int, address: CliAddress, hex_message: str +) -> None: from .wallet_funcs import sign_message asyncio.run( @@ -408,6 +409,7 @@ def 
update_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int "-id", "--asset-id", help="The Asset ID of the coin you wish to add/rename (the treehash of the TAIL program)", + type=Bytes32ParamType(), required=True, ) @click.option( @@ -416,7 +418,7 @@ def update_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int help="The name you wish to designate to the token", ) @options.create_fingerprint() -def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str, fingerprint: int) -> None: +def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: bytes32, token_name: str, fingerprint: int) -> None: from .wallet_funcs import add_token asyncio.run(add_token(wallet_rpc_port, fingerprint, asset_id, token_name)) @@ -451,9 +453,7 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str required=True, type=click.Path(dir_okay=False, writable=True, path_type=pathlib.Path), ) -@click.option( - "-m", "--fee", help="A fee to add to the offer when it gets taken, in XCH", default="0", show_default=True -) +@options.create_fee("A fee to add to the offer when it gets taken, in XCH") @click.option( "--reuse", help="Reuse existing address for the offer.", @@ -467,7 +467,7 @@ def make_offer_cmd( offer: Sequence[str], request: Sequence[str], filepath: pathlib.Path, - fee: str, + fee: uint64, reuse: bool, override: bool, ) -> None: @@ -481,7 +481,7 @@ def make_offer_cmd( make_offer( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, - d_fee=Decimal(fee), + fee=fee, offers=offer, requests=request, filepath=filepath, @@ -501,7 +501,7 @@ def make_offer_cmd( default=None, ) @options.create_fingerprint() -@click.option("-id", "--id", help="The ID of the offer that you wish to examine") +@click.option("-id", "--id", help="The ID of the offer that you wish to examine", type=Bytes32ParamType()) @click.option("-p", "--filepath", help="The path to rewrite the offer file to (must be used in conjunction with --id)") @click.option("-em", "--exclude-my-offers", help="Exclude your own offers from the output", is_flag=True) @click.option("-et", "--exclude-taken-offers", help="Exclude offers that you've accepted from the output", is_flag=True) @@ -513,7 +513,7 @@ def make_offer_cmd( def get_offers_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - id: Optional[str], + id: Optional[bytes32], filepath: Optional[str], exclude_my_offers: bool, exclude_taken_offers: bool, @@ -549,9 +549,8 @@ def get_offers_cmd( ) @options.create_fingerprint() @click.option("-e", "--examine-only", help="Print the summary of the offer file but do not take it", is_flag=True) -@click.option( - "-m", "--fee", help="The fee to use when pushing the completed offer, in XCH", default="0", show_default=True -) +@options.create_fee("The fee to use when pushing the completed offer, in XCH") +# TODO: Reuse is not used @click.option( "--reuse", help="Reuse existing address for the offer.", @@ -563,12 +562,12 @@ def take_offer_cmd( wallet_rpc_port: Optional[int], fingerprint: int, examine_only: bool, - fee: str, + fee: uint64, reuse: bool, ) -> None: from .wallet_funcs import take_offer - asyncio.run(take_offer(wallet_rpc_port, fingerprint, Decimal(fee), path_or_hex, examine_only)) # reuse is not used + asyncio.run(take_offer(wallet_rpc_port, fingerprint, fee, path_or_hex, examine_only)) @wallet_cmd.command("cancel_offer", help="Cancel an existing offer") @@ -580,15 +579,15 @@ def take_offer_cmd( default=None, ) @options.create_fingerprint() -@click.option("-id", "--id", help="The 
offer ID that you wish to cancel", required=True) +@click.option("-id", "--id", help="The offer ID that you wish to cancel", required=True, type=Bytes32ParamType()) @click.option("--insecure", help="Don't make an on-chain transaction, simply mark the offer as cancelled", is_flag=True) -@click.option( - "-m", "--fee", help="The fee to use when cancelling the offer securely, in XCH", default="0", show_default=True -) -def cancel_offer_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: str, insecure: bool, fee: str) -> None: +@options.create_fee("The fee to use when cancelling the offer securely, in XCH") +def cancel_offer_cmd( + wallet_rpc_port: Optional[int], fingerprint: int, id: bytes32, insecure: bool, fee: uint64 +) -> None: from .wallet_funcs import cancel_offer - asyncio.run(cancel_offer(wallet_rpc_port, fingerprint, Decimal(fee), id, not insecure)) + asyncio.run(cancel_offer(wallet_rpc_port, fingerprint, fee, id, not insecure)) @wallet_cmd.command("check", short_help="Check wallet DB integrity", help=check_help_text) @@ -628,21 +627,13 @@ def did_cmd() -> None: default=1, show_default=True, ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() def did_create_wallet_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, name: Optional[str], amount: int, fee: str + wallet_rpc_port: Optional[int], fingerprint: int, name: Optional[str], amount: int, fee: uint64 ) -> None: from .wallet_funcs import create_did_wallet - asyncio.run(create_did_wallet(wallet_rpc_port, fingerprint, Decimal(fee), name, amount)) + asyncio.run(create_did_wallet(wallet_rpc_port, fingerprint, fee, name, amount)) @did_cmd.command("sign_message", help="Sign a message by a DID") @@ -654,9 +645,9 @@ def did_create_wallet_cmd( default=None, ) @options.create_fingerprint() -@click.option("-i", "--did_id", help="DID ID you want to use for signing", type=str, required=True) +@click.option("-i", "--did_id", help="DID ID you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True) -def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: str, hex_message: str) -> None: +def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: CliAddress, hex_message: str) -> None: from .wallet_funcs import sign_message asyncio.run( @@ -860,19 +851,12 @@ def did_message_spend_cmd( ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the DID wallet to use", type=int, required=True) -@click.option("-ta", "--target-address", help="Target recipient wallet address", type=str, required=True) +# TODO: Change RPC to use puzzlehash instead of address +@click.option("-ta", "--target-address", help="Target recipient wallet address", type=AddressParamType(), required=True) @click.option( "-rr", "--reset_recovery", help="If you want to reset the recovery DID settings.", is_flag=True, default=False ) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "--reuse", help="Reuse existing address for the change.", @@ -883,9 +867,9 @@ def did_transfer_did( wallet_rpc_port: Optional[int], fingerprint: int, id: int, - target_address: str, + target_address: CliAddress, reset_recovery: bool, - fee: str, + fee: 
uint64, reuse: bool, ) -> None: from .wallet_funcs import transfer_did @@ -895,7 +879,7 @@ def did_transfer_did( wallet_rpc_port, fingerprint, id, - Decimal(fee), + fee, target_address, reset_recovery is False, True if reuse else None, @@ -917,10 +901,11 @@ def nft_cmd() -> None: default=None, ) @options.create_fingerprint() -@click.option("-di", "--did-id", help="DID Id to use", type=str) +# TODO: Change RPC to use puzzlehash instead of address +@click.option("-di", "--did-id", help="DID Id to use", type=AddressParamType()) @click.option("-n", "--name", help="Set the NFT wallet name", type=str) def nft_wallet_create_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, did_id: Optional[str], name: Optional[str] + wallet_rpc_port: Optional[int], fingerprint: int, did_id: Optional[CliAddress], name: Optional[str] ) -> None: from .wallet_funcs import create_nft_wallet @@ -936,9 +921,9 @@ def nft_wallet_create_cmd( default=None, ) @options.create_fingerprint() -@click.option("-i", "--nft_id", help="NFT ID you want to use for signing", type=str, required=True) +@click.option("-i", "--nft_id", help="NFT ID you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True) -def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: str, hex_message: str) -> None: +def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: CliAddress, hex_message: str) -> None: from .wallet_funcs import sign_message asyncio.run( @@ -962,8 +947,8 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: s ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True) -@click.option("-ra", "--royalty-address", help="Royalty address", type=str) -@click.option("-ta", "--target-address", help="Target address", type=str) +@click.option("-ra", "--royalty-address", help="Royalty address", type=AddressParamType()) +@click.option("-ta", "--target-address", help="Target address", type=AddressParamType()) @click.option("--no-did-ownership", help="Disable DID ownership support", is_flag=True, default=False) @click.option("-nh", "--hash", help="NFT content hash", type=str, required=True) @click.option("-u", "--uris", help="Comma separated list of URIs", type=str, required=True) @@ -973,15 +958,7 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: s @click.option("-lu", "--license-uris", help="Comma separated list of license URIs", type=str) @click.option("-et", "--edition-total", help="NFT edition total", type=int, show_default=True, default=1) @click.option("-en", "--edition-number", help="NFT edition number", show_default=True, default=1, type=int) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "-rp", "--royalty-percentage-fraction", @@ -1000,8 +977,8 @@ def nft_mint_cmd( wallet_rpc_port: Optional[int], fingerprint: int, id: int, - royalty_address: Optional[str], - target_address: Optional[str], + royalty_address: Optional[CliAddress], + target_address: Optional[CliAddress], no_did_ownership: bool, hash: str, uris: str, @@ -1011,7 +988,7 @@ def nft_mint_cmd( license_uris: Optional[str], edition_total: Optional[int], edition_number: Optional[int], - fee: str, + fee: uint64, royalty_percentage_fraction: int, reuse: bool, ) 
-> None: @@ -1032,8 +1009,8 @@ def nft_mint_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, - royalty_address=royalty_address, - target_address=target_address, + royalty_cli_address=royalty_address, + target_cli_address=target_address, no_did_ownership=no_did_ownership, hash=hash, uris=[u.strip() for u in uris.split(",")], @@ -1043,7 +1020,7 @@ def nft_mint_cmd( license_uris=license_uris_list, edition_total=edition_total, edition_number=edition_number, - d_fee=Decimal(fee), + fee=fee, royalty_percentage=royalty_percentage_fraction, reuse_puzhash=True if reuse else None, ) @@ -1060,19 +1037,12 @@ def nft_mint_cmd( ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True) +# TODO: change rpc to take bytes instead of a hex string @click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to add the URI to", type=str, required=True) @click.option("-u", "--uri", help="URI to add to the NFT", type=str) @click.option("-mu", "--metadata-uri", help="Metadata URI to add to the NFT", type=str) @click.option("-lu", "--license-uri", help="License URI to add to the NFT", type=str) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "--reuse", help="Reuse existing address for the change.", @@ -1087,7 +1057,7 @@ def nft_add_uri_cmd( uri: str, metadata_uri: str, license_uri: str, - fee: str, + fee: uint64, reuse: bool, ) -> None: from .wallet_funcs import add_uri_to_nft @@ -1097,7 +1067,7 @@ def nft_add_uri_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, - d_fee=Decimal(fee), + fee=fee, nft_coin_id=nft_coin_id, uri=uri, metadata_uri=metadata_uri, @@ -1118,16 +1088,9 @@ def nft_add_uri_cmd( @options.create_fingerprint() @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True) @click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to transfer", type=str, required=True) -@click.option("-ta", "--target-address", help="Target recipient wallet address", type=str, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +# TODO: Change RPC to use puzzlehash instead of address +@click.option("-ta", "--target-address", help="Target recipient wallet address", type=AddressParamType(), required=True) +@options.create_fee() @click.option( "--reuse", help="Reuse existing address for the change.", @@ -1139,8 +1102,8 @@ def nft_transfer_cmd( fingerprint: int, id: int, nft_coin_id: str, - target_address: str, - fee: str, + target_address: CliAddress, + fee: uint64, reuse: bool, ) -> None: from .wallet_funcs import transfer_nft @@ -1150,9 +1113,9 @@ def nft_transfer_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, - d_fee=Decimal(fee), + fee=fee, nft_coin_id=nft_coin_id, - target_address=target_address, + target_cli_address=target_address, reuse_puzhash=True if reuse else None, ) ) @@ -1186,17 +1149,10 @@ def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, num: ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True) +# TODO: Change RPC to use bytes instead of hex string @click.option("-di", "--did-id", help="DID Id to set on the NFT", type=str, required=True) @click.option("-ni", "--nft-coin-id", help="Id of the NFT 
coin to set the DID on", type=str, required=True) -@click.option( - "-m", - "--fee", - help="Set the fees per transaction, in XCH.", - type=str, - default="0", - show_default=True, - callback=validate_fee, -) +@options.create_fee() @click.option( "--reuse", help="Reuse existing address for the change.", @@ -1209,7 +1165,7 @@ def nft_set_did_cmd( id: int, did_id: str, nft_coin_id: str, - fee: str, + fee: uint64, reuse: bool, ) -> None: from .wallet_funcs import set_nft_did @@ -1219,7 +1175,7 @@ def nft_set_did_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, - d_fee=Decimal(fee), + fee=fee, nft_coin_id=nft_coin_id, did_id=did_id, reuse_puzhash=True if reuse else None, @@ -1236,6 +1192,7 @@ def nft_set_did_cmd( default=None, ) @options.create_fingerprint() +# TODO: Change RPC to use bytes instead of hex string @click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to get information on", type=str, required=True) def nft_get_info_cmd( wallet_rpc_port: Optional[int], @@ -1265,29 +1222,32 @@ def notification_cmd() -> None: default=None, ) @options.create_fingerprint() -@click.option("-t", "--to-address", help="The address to send the notification to", type=str, required=True) +@click.option( + "-t", "--to-address", help="The address to send the notification to", type=AddressParamType(), required=True +) @click.option( "-a", "--amount", help="The amount to send to get the notification past the recipient's spam filter", - type=str, - default="0.00001", + type=AmountParamType(), + default=uint64(10000000), required=True, show_default=True, ) @click.option("-n", "--message", help="The message of the notification", type=str) -@click.option("-m", "--fee", help="The fee for the transaction, in XCH", type=str) +@options.create_fee() def send_notification_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - to_address: str, - amount: str, + to_address: CliAddress, + amount: CliAmount, message: str, - fee: str, + fee: uint64, ) -> None: from .wallet_funcs import send_notification - asyncio.run(send_notification(wallet_rpc_port, fingerprint, Decimal(fee), to_address, message, Decimal(amount))) + message_bytes: bytes = bytes(message, "utf8") + asyncio.run(send_notification(wallet_rpc_port, fingerprint, fee, to_address, message_bytes, amount)) @notification_cmd.command("get", help="Get notification(s) that are in your wallet") @@ -1299,13 +1259,13 @@ def send_notification_cmd( default=None, ) @options.create_fingerprint() -@click.option("-i", "--id", help="The specific notification ID to show", type=str, multiple=True) +@click.option("-i", "--id", help="The specific notification ID to show", type=Bytes32ParamType(), multiple=True) @click.option("-s", "--start", help="The number of notifications to skip", type=int, default=None) @click.option("-e", "--end", help="The number of notifications to stop at", type=int, default=None) def get_notifications_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - id: Sequence[str], + id: Sequence[bytes32], start: Optional[int], end: Optional[int], ) -> None: @@ -1323,12 +1283,12 @@ def get_notifications_cmd( default=None, ) @options.create_fingerprint() -@click.option("-i", "--id", help="A specific notification ID to delete", type=str, multiple=True) +@click.option("-i", "--id", help="A specific notification ID to delete", type=Bytes32ParamType(), multiple=True) @click.option("--all", help="All notifications can be deleted (they will be recovered during resync)", is_flag=True) def delete_notifications_cmd( wallet_rpc_port: 
Optional[int], fingerprint: int, - id: Sequence[str], + id: Sequence[bytes32], all: bool, ) -> None: from .wallet_funcs import delete_notifications @@ -1350,19 +1310,25 @@ def vcs_cmd() -> None: # pragma: no cover default=None, ) @options.create_fingerprint() -@click.option("-d", "--did", help="The DID of the VC's proof provider", type=str, required=True) -@click.option("-t", "--target-address", help="The address to send the VC to once it's minted", type=str, required=False) -@click.option("-m", "--fee", help="Blockchain fee for mint transaction, in XCH", type=str, required=False, default="0") +@click.option("-d", "--did", help="The DID of the VC's proof provider", type=AddressParamType(), required=True) +@click.option( + "-t", + "--target-address", + help="The address to send the VC to once it's minted", + type=AddressParamType(), + required=False, +) +@options.create_fee("Blockchain fee for mint transaction, in XCH") def mint_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - did: str, - target_address: Optional[str], - fee: str, + did: CliAddress, + target_address: Optional[CliAddress], + fee: uint64, ) -> None: # pragma: no cover from .wallet_funcs import mint_vc - asyncio.run(mint_vc(wallet_rpc_port, fingerprint, did, Decimal(fee), target_address)) + asyncio.run(mint_vc(wallet_rpc_port, fingerprint, did, fee, target_address)) @vcs_cmd.command("get", short_help="Get a list of existing VCs") @@ -1400,18 +1366,22 @@ def get_vcs_cmd( default=None, ) @options.create_fingerprint() -@click.option("-l", "--vc-id", help="The launcher ID of the VC whose proofs should be updated", type=str, required=True) +@click.option( + "-l", + "--vc-id", + help="The launcher ID of the VC whose proofs should be updated", + type=Bytes32ParamType(), + required=True, +) @click.option( "-t", "--new-puzhash", help="The address to send the VC after the proofs have been updated", - type=str, + type=Bytes32ParamType(), required=False, ) @click.option("-p", "--new-proof-hash", help="The new proof hash to update the VC to", type=str, required=True) -@click.option( - "-m", "--fee", help="Blockchain fee for update transaction, in XCH", type=str, required=False, default="0" -) +@options.create_fee("Blockchain fee for update transaction, in XCH") @click.option( "--reuse-puzhash/--generate-new-puzhash", help="Send the VC back to the same puzzle hash it came from (ignored if --new-puzhash is specified)", @@ -1421,10 +1391,10 @@ def get_vcs_cmd( def spend_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - vc_id: str, - new_puzhash: Optional[str], + vc_id: bytes32, + new_puzhash: Optional[bytes32], new_proof_hash: str, - fee: str, + fee: uint64, reuse_puzhash: bool, ) -> None: # pragma: no cover from .wallet_funcs import spend_vc @@ -1434,7 +1404,7 @@ def spend_vc_cmd( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, vc_id=vc_id, - d_fee=Decimal(fee), + fee=fee, new_puzhash=new_puzhash, new_proof_hash=new_proof_hash, reuse_puzhash=reuse_puzhash, @@ -1497,19 +1467,17 @@ def get_proofs_for_root_cmd( "-p", "--parent-coin-id", help="The ID of the parent coin of the VC (optional if VC ID is used)", - type=str, + type=Bytes32ParamType(), required=False, ) @click.option( "-l", "--vc-id", help="The launcher ID of the VC to revoke (must be tracked by wallet) (optional if Parent ID is used)", - type=str, + type=Bytes32ParamType(), required=False, ) -@click.option( - "-m", "--fee", help="Blockchain fee for revocation transaction, in XCH", type=str, required=False, default="0" -) +@options.create_fee("Blockchain fee for 
revocation transaction, in XCH") @click.option( "--reuse-puzhash/--generate-new-puzhash", help="Send the VC back to the same puzzle hash it came from (ignored if --new-puzhash is specified)", @@ -1519,14 +1487,14 @@ def get_proofs_for_root_cmd( def revoke_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, - parent_coin_id: Optional[str], - vc_id: Optional[str], - fee: str, + parent_coin_id: Optional[bytes32], + vc_id: Optional[bytes32], + fee: uint64, reuse_puzhash: bool, ) -> None: # pragma: no cover from .wallet_funcs import revoke_vc - asyncio.run(revoke_vc(wallet_rpc_port, fingerprint, parent_coin_id, vc_id, Decimal(fee), reuse_puzhash)) + asyncio.run(revoke_vc(wallet_rpc_port, fingerprint, parent_coin_id, vc_id, fee, reuse_puzhash)) @vcs_cmd.command("approve_r_cats", help="Claim any R-CATs that are currently pending VC approval") @@ -1540,13 +1508,23 @@ def revoke_vc_cmd( @options.create_fingerprint() @click.option("-i", "--id", help="Id of the wallet with the pending approval balance", type=int, required=True) @click.option( - "-a", "--min-amount-to-claim", help="The minimum amount to approve to move into the wallet", type=str, required=True + "-a", + "--min-amount-to-claim", + help="The minimum amount to approve to move into the wallet", + type=AmountParamType(), + required=True, +) +@options.create_fee("Blockchain fee for approval transaction, in XCH") +@click.option( + "-ma", + "--min-coin-amount", + type=AmountParamType(), + default=cli_amount_none, + help="The minimum coin amount to select", ) @click.option( - "-m", "--fee", type=str, default=0, show_default=True, help="Blockchain fee for approval transaction, in XCH" + "-l", "--max-coin-amount", type=AmountParamType(), default=cli_amount_none, help="The maximum coin amount to select" ) -@click.option("-ma", "--min-coin-amount", type=Decimal, help="The minimum coin amount to select") -@click.option("-l", "--max-coin-amount", type=Decimal, help="The maximum coin amount to select") @click.option( "--reuse", help="Reuse existing address for the change.", @@ -1557,16 +1535,16 @@ def approve_r_cats_cmd( wallet_rpc_port: Optional[int], fingerprint: int, id: int, - min_amount_to_claim: str, - fee: str, - min_coin_amount: Optional[Decimal], - max_coin_amount: Optional[Decimal], + min_amount_to_claim: CliAmount, + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, reuse: bool, ) -> None: # pragma: no cover from .wallet_funcs import approve_r_cats asyncio.run( approve_r_cats( - wallet_rpc_port, fingerprint, id, min_amount_to_claim, Decimal(fee), min_coin_amount, max_coin_amount, reuse + wallet_rpc_port, fingerprint, uint32(id), min_amount_to_claim, fee, min_coin_amount, max_coin_amount, reuse ) ) diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index ab762b9ae60e..2be8dcce75f9 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -17,6 +17,7 @@ transaction_status_msg, transaction_submitted_msg, ) +from chia.cmds.param_types import CliAddress, CliAmount from chia.cmds.peer_funcs import print_connections from chia.cmds.units import units from chia.rpc.wallet_request_types import GetNotifications @@ -35,7 +36,7 @@ from chia.wallet.trading.trade_status import TradeStatus from chia.wallet.transaction_record import TransactionRecord from chia.wallet.transaction_sorting import SortKey -from chia.wallet.util.address_type import AddressType, ensure_valid_address +from chia.wallet.util.address_type import AddressType from chia.wallet.util.puzzle_decorator_type import 
PuzzleDecoratorType from chia.wallet.util.query_filter import HashFilter, TransactionTypeFilter from chia.wallet.util.transaction_type import CLAWBACK_INCOMING_TRANSACTION_TYPES, TransactionType @@ -69,7 +70,7 @@ def print_transaction( address_prefix: str, mojo_per_unit: int, coin_record: Optional[Dict[str, Any]] = None, -) -> None: # pragma: no cover +) -> None: if verbose: print(tx) else: @@ -91,7 +92,7 @@ def print_transaction( print("") -def get_mojo_per_unit(wallet_type: WalletType) -> int: # pragma: no cover +def get_mojo_per_unit(wallet_type: WalletType) -> int: mojo_per_unit: int if wallet_type in { WalletType.STANDARD_WALLET, @@ -127,13 +128,13 @@ async def get_unit_name_for_wallet_id( wallet_type: WalletType, wallet_id: int, wallet_client: WalletRpcClient, -) -> str: # pragma: no cover +) -> str: if wallet_type in { WalletType.STANDARD_WALLET, WalletType.POOLING_WALLET, WalletType.DATA_LAYER, WalletType.VC, - }: # pragma: no cover + }: name: str = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"].upper() elif wallet_type in {WalletType.CAT, WalletType.CRCAT}: name = await wallet_client.get_cat_name(wallet_id=wallet_id) @@ -185,7 +186,7 @@ async def get_transactions( sort_key: SortKey, reverse: bool, clawback: bool, -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if paginate is None: paginate = sys.stdout.isatty() @@ -253,7 +254,7 @@ async def get_transactions( break -def check_unusual_transaction(amount: Decimal, fee: Decimal) -> bool: +def check_unusual_transaction(amount: uint64, fee: uint64) -> bool: return fee >= amount @@ -262,32 +263,23 @@ async def send( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, - amount: Decimal, + amount: CliAmount, memo: Optional[str], - fee: Decimal, - address: str, + fee: uint64, + address: CliAddress, override: bool, - min_coin_amount: str, - max_coin_amount: Optional[str], - excluded_coin_ids: Sequence[str], + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, + excluded_coin_ids: Sequence[bytes32], reuse_puzhash: Optional[bool], clawback_time_lock: int, -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if memo is None: memos = None else: memos = [memo] - if not override and check_unusual_transaction(amount, fee): - print( - f"A transaction of amount {amount} and fee {fee} is unusual.\n" - f"Pass in --override if you are sure you mean to do this." - ) - return - if amount == 0: - print("You can not send an empty transaction") - return if clawback_time_lock < 0: print("Clawback time lock seconds cannot be negative.") return @@ -298,21 +290,31 @@ async def send( print(f"Wallet id: {wallet_id} not found.") return - final_fee: uint64 = uint64(int(fee * units["chia"])) # fees are always in XCH mojos - final_amount: uint64 = uint64(int(amount * mojo_per_unit)) + final_amount: uint64 = amount.convert_amount(mojo_per_unit) + + if not override and check_unusual_transaction(final_amount, fee): + print( + f"A transaction of amount {final_amount / units['chia']} and fee {fee} is unusual.\n" + f"Pass in --override if you are sure you mean to do this." 
+ ) + return + if final_amount == 0: + print("You can not send an empty transaction") + return + if typ == WalletType.STANDARD_WALLET: print("Submitting transaction...") res = await wallet_client.send_transaction( wallet_id, final_amount, - address, + address.original_address, CMDTXConfigLoader( min_coin_amount=min_coin_amount, max_coin_amount=max_coin_amount, excluded_coin_ids=list(excluded_coin_ids), reuse_puzhash=reuse_puzhash, ).to_tx_config(mojo_per_unit, config, fingerprint), - final_fee, + fee, memos, puzzle_decorator_override=( [{"decorator": PuzzleDecoratorType.CLAWBACK.name, "clawback_timelock": clawback_time_lock}] @@ -331,8 +333,8 @@ async def send( reuse_puzhash=reuse_puzhash, ).to_tx_config(mojo_per_unit, config, fingerprint), final_amount, - address, - final_fee, + address.original_address, + fee, memos, ) else: @@ -379,44 +381,31 @@ async def update_derivation_index(wallet_rpc_port: Optional[int], fp: Optional[i print("Your balances may take a while to update.") -async def add_token(wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: str, token_name: str) -> None: +async def add_token(wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: bytes32, token_name: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): - try: - asset_id_bytes: bytes32 = bytes32.from_hexstr(asset_id) - existing_info: Optional[Tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name( - asset_id_bytes - ) - if existing_info is None or existing_info[0] is None: - response = await wallet_client.create_wallet_for_existing_cat(asset_id_bytes) - wallet_id = response["wallet_id"] - await wallet_client.set_cat_name(wallet_id, token_name) - print(f"Successfully added {token_name} with wallet id {wallet_id} on key {fingerprint}") - else: - wallet_id, old_name = existing_info - await wallet_client.set_cat_name(wallet_id, token_name) - print( - f"Successfully renamed {old_name} with wallet_id {wallet_id} on key {fingerprint} to {token_name}" - ) - except ValueError as e: - if "fromhex()" in str(e): - print(f"{asset_id} is not a valid Asset ID") - else: - raise + existing_info: Optional[Tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name(asset_id) + if existing_info is None or existing_info[0] is None: + response = await wallet_client.create_wallet_for_existing_cat(asset_id) + wallet_id = response["wallet_id"] + await wallet_client.set_cat_name(wallet_id, token_name) + print(f"Successfully added {token_name} with wallet id {wallet_id} on key {fingerprint}") + else: + wallet_id, old_name = existing_info + await wallet_client.set_cat_name(wallet_id, token_name) + print(f"Successfully renamed {old_name} with wallet_id {wallet_id} on key {fingerprint} to {token_name}") async def make_offer( *, wallet_rpc_port: Optional[int], fp: Optional[int], - d_fee: Decimal, + fee: uint64, offers: Sequence[str], requests: Sequence[str], filepath: pathlib.Path, reuse_puzhash: Optional[bool], ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - fee: int = int(d_fee * units["chia"]) - if offers == [] or requests == []: print("Not creating offer: Must be offering and requesting at least one asset") else: @@ -650,7 +639,7 @@ async def get_offers( *, wallet_rpc_port: Optional[int], fp: Optional[int], - offer_id: Optional[str], + offer_id: Optional[bytes32], filepath: Optional[str], exclude_my_offers: bool = False, exclude_taken_offers: bool = False, @@ -686,7 +675,7 @@ async def 
get_offers( start = end end += batch_size else: - records = [await wallet_client.get_offer(bytes32.from_hexstr(offer_id), file_contents)] + records = [await wallet_client.get_offer(offer_id, file_contents)] if filepath is not None: with open(pathlib.Path(filepath), "w") as file: file.write(Offer.from_bytes(records[0].offer).to_bech32()) @@ -699,7 +688,7 @@ async def get_offers( async def take_offer( wallet_rpc_port: Optional[int], fp: Optional[int], - d_fee: Decimal, + fee: uint64, file: str, examine_only: bool, ) -> None: @@ -712,8 +701,6 @@ async def take_offer( else: offer_hex = file - fee: int = int(d_fee * units["chia"]) - try: offer = Offer.from_bech32(offer_hex) except ValueError: @@ -795,14 +782,11 @@ async def take_offer( async def cancel_offer( wallet_rpc_port: Optional[int], fp: Optional[int], - d_fee: Decimal, - offer_id_hex: str, + fee: uint64, + offer_id: bytes32, secure: bool, ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - offer_id = bytes32.from_hexstr(offer_id_hex) - fee: int = int(d_fee * units["chia"]) - trade_record = await wallet_client.get_offer(offer_id, file_contents=True) await print_trade_record(trade_record, wallet_client, summaries=True) @@ -815,7 +799,7 @@ async def cancel_offer( print(f"Use chia wallet get_offers --id {trade_record.trade_id} -f {fingerprint} to view cancel status") -def wallet_coin_unit(typ: WalletType, address_prefix: str) -> Tuple[str, int]: # pragma: no cover +def wallet_coin_unit(typ: WalletType, address_prefix: str) -> Tuple[str, int]: if typ in {WalletType.CAT, WalletType.CRCAT}: return "", units["cat"] if typ in [WalletType.STANDARD_WALLET, WalletType.POOLING_WALLET, WalletType.MULTI_SIG]: @@ -836,7 +820,7 @@ def print_balance(amount: int, scale: int, address_prefix: str, *, decimal_only: async def print_balances( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_type: Optional[WalletType] = None -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): summaries_response = await wallet_client.get_wallets(wallet_type) address_prefix = selected_network_address_prefix(config) @@ -914,10 +898,9 @@ async def print_balances( async def create_did_wallet( - wallet_rpc_port: Optional[int], fp: Optional[int], d_fee: Decimal, name: Optional[str], amount: int + wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, name: Optional[str], amount: int ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - fee: int = int(d_fee * units["chia"]) try: response = await wallet_client.create_new_did_wallet( amount, CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), fee, name @@ -1021,15 +1004,14 @@ async def transfer_did( wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int, - d_fee: Decimal, - target_address: str, + fee: uint64, + target_cli_address: CliAddress, with_recovery: bool, reuse_puzhash: Optional[bool], ) -> None: - fee: int = int(d_fee * units["chia"]) - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: + target_address = target_cli_address.original_address response = await wallet_client.did_transfer_did( did_wallet_id, target_address, @@ -1072,11 +1054,11 @@ async def find_lost_did( async def create_nft_wallet( - wallet_rpc_port: Optional[int], fp: Optional[int], did_id: Optional[str] = None, name: Optional[str] = None + wallet_rpc_port: Optional[int], fp: 
Optional[int], did_id: Optional[CliAddress] = None, name: Optional[str] = None ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: - response = await wallet_client.create_new_nft_wallet(did_id, name) + response = await wallet_client.create_new_nft_wallet(did_id.original_address if did_id else None, name) wallet_id = response["wallet_id"] print(f"Successfully created an NFT wallet with id {wallet_id} on key {fingerprint}") except Exception as e: @@ -1088,8 +1070,8 @@ async def mint_nft( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, - royalty_address: Optional[str], - target_address: Optional[str], + royalty_cli_address: Optional[CliAddress], + target_cli_address: Optional[CliAddress], no_did_ownership: bool, hash: str, uris: List[str], @@ -1099,22 +1081,13 @@ async def mint_nft( license_uris: List[str], edition_total: Optional[int], edition_number: Optional[int], - d_fee: Decimal, + fee: uint64, royalty_percentage: int, reuse_puzhash: Optional[bool], ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - royalty_address = ( - None - if not royalty_address - else ensure_valid_address(royalty_address, allowed_types={AddressType.XCH}, config=config) - ) - target_address = ( - None - if not target_address - else ensure_valid_address(target_address, allowed_types={AddressType.XCH}, config=config) - ) - fee: int = int(d_fee * units["chia"]) + royalty_address = royalty_cli_address.validate_address_type(AddressType.XCH) if royalty_cli_address else None + target_address = target_cli_address.validate_address_type(AddressType.XCH) if target_cli_address else None try: response = await wallet_client.get_nft_wallet_did(wallet_id) wallet_did = response["did_id"] @@ -1160,7 +1133,7 @@ async def add_uri_to_nft( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, - d_fee: Decimal, + fee: uint64, nft_coin_id: str, uri: Optional[str], metadata_uri: Optional[str], @@ -1182,7 +1155,6 @@ async def add_uri_to_nft( uri_value = license_uri else: raise ValueError("You must provide at least one of the URI flags") - fee: int = int(d_fee * units["chia"]) response = await wallet_client.add_uri_to_nft( wallet_id, nft_coin_id, @@ -1204,15 +1176,14 @@ async def transfer_nft( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, - d_fee: Decimal, + fee: uint64, nft_coin_id: str, - target_address: str, + target_cli_address: CliAddress, reuse_puzhash: Optional[bool], ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: - target_address = ensure_valid_address(target_address, allowed_types={AddressType.XCH}, config=config) - fee: int = int(d_fee * units["chia"]) + target_address = target_cli_address.validate_address_type(AddressType.XCH) response = await wallet_client.transfer_nft( wallet_id, nft_coin_id, @@ -1287,13 +1258,12 @@ async def set_nft_did( wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, - d_fee: Decimal, + fee: uint64, nft_coin_id: str, did_id: str, reuse_puzhash: Optional[bool], ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - fee: int = int(d_fee * units["chia"]) try: response = await wallet_client.set_nft_did( wallet_id, @@ -1380,31 +1350,30 @@ def fungible_assets_from_offer(offer: Offer) -> List[Optional[bytes32]]: async def send_notification( wallet_rpc_port: Optional[int], fp: Optional[int], - d_fee: Decimal, - address_str: str, - 
message_hex: str, - d_amount: Decimal, + fee: uint64, + address: CliAddress, + message: bytes, + cli_amount: CliAmount, ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - address: bytes32 = decode_puzzle_hash(address_str) - amount: uint64 = uint64(d_amount * units["chia"]) - message: bytes = bytes(message_hex, "utf8") - fee: uint64 = uint64(d_fee * units["chia"]) + amount: uint64 = cli_amount.convert_amount(units["chia"]) - tx = await wallet_client.send_notification(address, message, amount, fee) + tx = await wallet_client.send_notification(address.puzzle_hash, message, amount, fee) print("Notification sent successfully.") print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx.name}") async def get_notifications( - wallet_rpc_port: Optional[int], fp: Optional[int], str_ids: Sequence[str], start: Optional[int], end: Optional[int] + wallet_rpc_port: Optional[int], + fp: Optional[int], + ids: Optional[Sequence[bytes32]], + start: Optional[int], + end: Optional[int], ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - ids: Optional[List[bytes32]] = [bytes32.from_hexstr(sid) for sid in str_ids] - if ids is not None and len(ids) == 0: - ids = None - + if ids is not None: + ids = None if len(ids) == 0 else list(ids) response = await wallet_client.get_notifications( GetNotifications(ids=ids, start=uint32.construct_optional(start), end=uint32.construct_optional(end)) ) @@ -1416,15 +1385,13 @@ async def get_notifications( async def delete_notifications( - wallet_rpc_port: Optional[int], fp: Optional[int], str_ids: Sequence[str], delete_all: bool + wallet_rpc_port: Optional[int], fp: Optional[int], ids: Sequence[bytes32], delete_all: bool ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - ids: Optional[List[bytes32]] = [bytes32.from_hexstr(sid) for sid in str_ids] - if delete_all: print(f"Success: {await wallet_client.delete_notifications()}") else: - print(f"Success: {await wallet_client.delete_notifications(ids=ids)}") + print(f"Success: {await wallet_client.delete_notifications(ids=list(ids))}") async def sign_message( @@ -1433,26 +1400,28 @@ async def sign_message( fp: Optional[int], addr_type: AddressType, message: str, - address: Optional[str] = None, - did_id: Optional[str] = None, - nft_id: Optional[str] = None, + address: Optional[CliAddress] = None, + did_id: Optional[CliAddress] = None, + nft_id: Optional[CliAddress] = None, ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if addr_type == AddressType.XCH: if address is None: print("Address is required for XCH address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_address(address, message) + pubkey, signature, signing_mode = await wallet_client.sign_message_by_address( + address.original_address, message + ) elif addr_type == AddressType.DID: if did_id is None: print("DID id is required for DID address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_id(did_id, message) + pubkey, signature, signing_mode = await wallet_client.sign_message_by_id(did_id.original_address, message) elif addr_type == AddressType.NFT: if nft_id is None: print("NFT id is required for NFT address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_id(nft_id, message) + pubkey, signature, signing_mode = await 
wallet_client.sign_message_by_id(nft_id.original_address, message) else: print("Invalid wallet type.") return @@ -1464,8 +1433,8 @@ async def sign_message( async def spend_clawback( - *, wallet_rpc_port: Optional[int], fp: Optional[int], fee: Decimal, tx_ids_str: str, force: bool = False -) -> None: # pragma: no cover + *, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, tx_ids_str: str, force: bool = False +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): tx_ids = [] for tid in tx_ids_str.split(","): @@ -1476,25 +1445,23 @@ async def spend_clawback( if fee < 0: print("Batch fee cannot be negative.") return - response = await wallet_client.spend_clawback_coins(tx_ids, int(fee * units["chia"]), force) + response = await wallet_client.spend_clawback_coins(tx_ids, fee, force) print(str(response)) async def mint_vc( - wallet_rpc_port: Optional[int], fp: Optional[int], did: str, d_fee: Decimal, target_address: Optional[str] -) -> None: # pragma: no cover + wallet_rpc_port: Optional[int], + fp: Optional[int], + did: CliAddress, + fee: uint64, + target_address: Optional[CliAddress], +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): vc_record, txs = await wallet_client.vc_mint( - decode_puzzle_hash(ensure_valid_address(did, allowed_types={AddressType.DID}, config=config)), + did.validate_address_type_get_ph(AddressType.DID), CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), - ( - None - if target_address is None - else decode_puzzle_hash( - ensure_valid_address(target_address, allowed_types={AddressType.XCH}, config=config) - ) - ), - uint64(int(d_fee * units["chia"])), + target_address.validate_address_type_get_ph(AddressType.XCH) if target_address else None, + fee, ) print(f"New VC with launcher ID minted: {vc_record.vc.launcher_id}") @@ -1510,9 +1477,7 @@ async def mint_vc( ) -async def get_vcs( - wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int -) -> None: # pragma: no cover +async def get_vcs(wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): vc_records, proofs = await wallet_client.vc_get_list(start, count) print("Proofs:") @@ -1539,18 +1504,18 @@ async def spend_vc( *, wallet_rpc_port: Optional[int], fp: Optional[int], - vc_id: str, - d_fee: Decimal, - new_puzhash: Optional[str], + vc_id: bytes32, + fee: uint64, + new_puzhash: Optional[bytes32], new_proof_hash: str, reuse_puzhash: bool, -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): txs = await wallet_client.vc_spend( - bytes32.from_hexstr(vc_id), - new_puzhash=None if new_puzhash is None else bytes32.from_hexstr(new_puzhash), + vc_id, + new_puzhash=new_puzhash, new_proof_hash=bytes32.from_hexstr(new_proof_hash), - fee=uint64(int(d_fee * units["chia"])), + fee=fee, tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), @@ -1571,7 +1536,7 @@ async def spend_vc( async def add_proof_reveal( wallet_rpc_port: Optional[int], fp: Optional[int], proofs: Sequence[str], root_only: bool -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if len(proofs) == 0: print("Must specify at least one proof") @@ -1587,9 +1552,7 @@ async def add_proof_reveal( return -async 
def get_proofs_for_root( - wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str -) -> None: # pragma: no cover +async def get_proofs_for_root(wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): proof_dict: Dict[str, str] = await wallet_client.vc_get_proofs_for_root(bytes32.from_hexstr(proof_hash)) print("Proofs:") @@ -1600,26 +1563,26 @@ async def get_proofs_for_root( async def revoke_vc( wallet_rpc_port: Optional[int], fp: Optional[int], - parent_coin_id: Optional[str], - vc_id: Optional[str], - fee: Decimal, + parent_coin_id: Optional[bytes32], + vc_id: Optional[bytes32], + fee: uint64, reuse_puzhash: bool, -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if parent_coin_id is None: if vc_id is None: print("Must specify either --parent-coin-id or --vc-id") return - record = await wallet_client.vc_get(bytes32.from_hexstr(vc_id)) + record = await wallet_client.vc_get(vc_id) if record is None: - print(f"Cannot find a VC with ID {vc_id}") + print(f"Cannot find a VC with ID {vc_id.hex()}") return parent_id: bytes32 = bytes32(record.vc.coin.parent_coin_info) else: - parent_id = bytes32.from_hexstr(parent_coin_id) + parent_id = parent_coin_id txs = await wallet_client.vc_revoke( parent_id, - fee=uint64(fee * units["chia"]), + fee=fee, tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), @@ -1641,23 +1604,23 @@ async def revoke_vc( async def approve_r_cats( wallet_rpc_port: Optional[int], fingerprint: int, - id: int, - min_amount_to_claim: str, - fee: Decimal, - min_coin_amount: Optional[Decimal], - max_coin_amount: Optional[Decimal], + wallet_id: uint32, + min_amount_to_claim: CliAmount, + fee: uint64, + min_coin_amount: CliAmount, + max_coin_amount: CliAmount, reuse: bool, -) -> None: # pragma: no cover +) -> None: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): if wallet_client is None: return txs = await wallet_client.crcat_approve_pending( - wallet_id=uint32(id), - min_amount_to_claim=uint64(int(Decimal(min_amount_to_claim) * units["cat"])), - fee=uint64(int(fee * units["chia"])), + wallet_id=wallet_id, + min_amount_to_claim=min_amount_to_claim.convert_amount(units["cat"]), + fee=fee, tx_config=CMDTXConfigLoader( - min_coin_amount=None if min_coin_amount is None else str(min_coin_amount), - max_coin_amount=None if max_coin_amount is None else str(max_coin_amount), + min_coin_amount=min_coin_amount, + max_coin_amount=max_coin_amount, reuse_puzhash=reuse, ).to_tx_config(units["cat"], config, fingerprint), ) diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 5f00c43624dc..a234fed136e3 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast from chia.data_layer.data_layer_util import DLProof, VerifyProofResponse from chia.data_layer.data_layer_wallet import Mirror, SingletonRecord @@ -1262,7 +1262,7 @@ async def dl_verify_proof(self, request: DLProof) -> VerifyProofResponse: async def get_notifications(self, request: GetNotifications) -> GetNotificationsResponse: return GetNotificationsResponse.from_json_dict(await 
self.fetch("get_notifications", request.to_json_dict())) - async def delete_notifications(self, ids: Optional[List[bytes32]] = None) -> bool: + async def delete_notifications(self, ids: Optional[Sequence[bytes32]] = None) -> bool: request = {} if ids is not None: request["ids"] = [id.hex() for id in ids] diff --git a/chia/wallet/trading/offer.py b/chia/wallet/trading/offer.py index e4eab664027a..ef14792deb24 100644 --- a/chia/wallet/trading/offer.py +++ b/chia/wallet/trading/offer.py @@ -72,7 +72,7 @@ def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> Notariz return cls(puzzle_hash, amount, memos, nonce) -@dataclass(frozen=True) +@dataclass(frozen=True, eq=False) class Offer: requested_payments: Dict[ Optional[bytes32], List[NotarizedPayment] @@ -82,10 +82,10 @@ class Offer: # this is a cache of the coin additions made by the SpendBundle (_bundle) # ordered by the coin being spent - _additions: Dict[Coin, List[Coin]] = field(init=False) + _additions: Dict[Coin, List[Coin]] = field(init=False, repr=False) _hints: Dict[bytes32, bytes32] = field(init=False) - _offered_coins: Dict[Optional[bytes32], List[Coin]] = field(init=False) - _final_spend_bundle: Optional[SpendBundle] = field(init=False) + _offered_coins: Dict[Optional[bytes32], List[Coin]] = field(init=False, repr=False) + _final_spend_bundle: Optional[SpendBundle] = field(init=False, repr=False) _conditions: Optional[Dict[Coin, List[Condition]]] = field(init=False) @staticmethod From 4572390f40fce50ba186f13ae30b171887e3a104 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Tue, 25 Jun 2024 10:07:01 -0700 Subject: [PATCH 26/77] Add 12-word mnemonic known issue (#18245) * Update CHANGELOG for 2.4.1 * Update some other items int he changelog * Revert "Update some other items int he changelog" This reverts commit 047b816cfe4a1f36174a382d8e74d5a67d2f5612. * changelog updates * Add 12-word mnemonic known issue * Update 12-word issue --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ef6835e8995..6604ceb0cb13 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ for setuptools_scm/PEP 440 reasons. ### Known Issues * A breaking backwards compatibility issue was introduced in 2.4.0 in the daemon RPC call `add_private_key`. We expect to resolve this in a future release. +* You cannot import or use a 12-word mnemonic key with 2.4.0 or 2.4.1. To import and use a 12-word mnemonic key we recommend you use 2.3.1. This will be resolved in a future release ### Deprecated macOS 11 (Big Sur) is deprecated. This release (2.4.1) will be the last release to support macOS 11 From b705350a9ecc1aad60ed7b44a0902ad609d9f357 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Tue, 25 Jun 2024 19:52:09 +0100 Subject: [PATCH 27/77] CHIA-801 Cover AGG_SIG_ME_ADDITIONAL_DATA in message construction for test_agg_sig_illegal_suffix (#18235) Cover AGG_SIG_ME_ADDITIONAL_DATA in message construction for test_agg_sig_illegal_suffix. 
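For context, the rule this test appears to exercise (as I understand it, not stated in the diff itself) is that a raw AGG_SIG message must not end with any of the network's reserved AGG_SIG_*_ADDITIONAL_DATA domain separators, so a signature made for one condition type cannot be replayed as another; this change simply adds the AGG_SIG_ME suffix to the set of messages the test tries. A minimal, self-contained sketch of that suffix check follows; the constant values and helper name are illustrative placeholders, not the consensus code or the real test harness:

    from typing import List

    # Illustrative stand-ins for the network's 32-byte AGG_SIG_*_ADDITIONAL_DATA
    # domain separators (the real values come from the consensus constants).
    AGG_SIG_ME_ADDITIONAL_DATA = bytes([0xAA] * 32)
    AGG_SIG_PARENT_ADDITIONAL_DATA = bytes([0xBB] * 32)

    def has_illegal_suffix(message: bytes, suffixes: List[bytes]) -> bool:
        # A message ending in any reserved suffix should be rejected by validation.
        return any(message.endswith(s) for s in suffixes)

    msg = b"some signed payload" + AGG_SIG_ME_ADDITIONAL_DATA
    assert has_illegal_suffix(msg, [AGG_SIG_ME_ADDITIONAL_DATA, AGG_SIG_PARENT_ADDITIONAL_DATA])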
--- chia/_tests/core/full_node/test_conditions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/chia/_tests/core/full_node/test_conditions.py b/chia/_tests/core/full_node/test_conditions.py index 9f217df2e370..6be002904ad0 100644 --- a/chia/_tests/core/full_node/test_conditions.py +++ b/chia/_tests/core/full_node/test_conditions.py @@ -551,6 +551,7 @@ async def test_agg_sig_illegal_suffix( pubkey = sk.get_g1() coin = blocks[-2].get_included_reward_coins()[0] for msg in [ + c.AGG_SIG_ME_ADDITIONAL_DATA, c.AGG_SIG_PARENT_ADDITIONAL_DATA, c.AGG_SIG_PUZZLE_ADDITIONAL_DATA, c.AGG_SIG_AMOUNT_ADDITIONAL_DATA, From c25eeb08f74631aa8d886eb8031cd22cf2e858e9 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Wed, 26 Jun 2024 09:21:33 -0700 Subject: [PATCH 28/77] [CHIA-504] Add support for external signers (PLEASE SQUASH) (#18199) * Repin hsms * Add to sdist only allowed list * Signer protocol tweaks * Convert wallet RPC client to deserialized types * Introduce @tx_out_cmd decorator * Make execute_signing_instructions RPC * Add transport layer support * Add signer commands * Rework wallet execute_signing_instructions * Inadvertent merge changes * Fix signer command tests * Try a different approach: recursive flattening * Bump hsms * Add comments * Test coverage * Coverage ignores * Remove inadvertent time traveler * Bring in lost time traveler * pylint * Attempt to make qr test less flaky * Actually test coverage ignore * Small refactor for test coverage * transport -> translation * missed one * transport -> translation * Fix asdict error * coverage * Forgoe rotation testing for now * Test coverage * jsonify_unsigned_txs -> full_jsonify * Move a class * Add better type checking to framework * explicitly state kwargs on _CommandParsingStage Co-authored-by: Kyle Altendorf * Address comments by @altendky * Consolidate add_private_key and add_public_key * Consolidate get_key_for_fingerprint and get_public_key_for_fingerprint * Test fix * Merge fix * Bad merge * Bad merge * oops * pylint * Tweak keychain_proxy.get_key_for_fingerprint * Fix wallet RPC test? * pragma: no cover * Redesign clvm_streamable * Fix test imports * black * fix one more test * Address comments by @altendky * Test coverage * bytes32 * Better CLI mnemonic check * Use functools.partial * Rename full jsonify to chip 29 * whoops missed a couple * Better CLI mnemonic check * black * pylint * Missed one * black * Farm at least one block * Revert last change and fix 0 block farming * black * bad merge * Merge fix * Inadvertent changes * Inadvertent changes * Syncronize with quex.tx_out_decorator2 * These are no longer dictionaries. 
* Use new clvm_streamable pattern * Fix offer endpoint * Add @tx_out_cmd decorator * Bump ecdsa version * Fix @marshal util to return proper transactions * Add get_public_key * port `chia wallet send` to @tx_out_cmd * merge fix of puzzle hash derivation * Port `chia wallet coins` to @tx_out_cmd * Port chia wallet clawback to @tx_out_cmd * Port `chia wallet take/cancel_offer` to @tx_out_cmd * Port `chia wallet did ...` to @tx_out_cmd * Port `chia wallet nft ...` to @tx_out_cmd * Port `chia wallet notifications send` to @tx_out_cmd * Port `chia wallet vcs ...` to @tx_out_cmd * Port `chia dao ...` to @tx_out_cmd * Exclude data layer and plotnft functions * [no ci] * Address comments by @AmineKhaldi * Fix rpc util * add test for BSTLSigningInstructions * [UNDO] d9c6f54398c9a50ddd67f6400ca40e17ee03f8b7 * [UNDO] 7fc1d2adfd9f1d89639c9ee859c93aa9839ef477 * [UNDO] 23c9ee854913cf498e8d320d56556688f8f3390c * [UNDO] aacc9f022883e0c6bb4a27abd19a6392a67bc9e9 * [UNDO] 548465403b9369acc914b3ae2711037cb731ee00 * Undo unintentional changes * Stuff that's not part of this * More stuff that doesn't belong * One last thing that doesn't belong * Unintentional change --------- Co-authored-by: Kyle Altendorf Co-authored-by: Amine Khaldi --- chia/_tests/cmds/cmd_test_utils.py | 49 +- chia/_tests/cmds/wallet/test_coins.py | 3 + chia/_tests/cmds/wallet/test_consts.py | 8 +- chia/_tests/cmds/wallet/test_dao.py | 74 ++- chia/_tests/cmds/wallet/test_did.py | 59 +- chia/_tests/cmds/wallet/test_nft.py | 66 ++- chia/_tests/cmds/wallet/test_notifications.py | 6 +- chia/_tests/cmds/wallet/test_tx_decorators.py | 27 + chia/_tests/cmds/wallet/test_vcs.py | 73 ++- chia/_tests/cmds/wallet/test_wallet.py | 124 +++-- chia/_tests/pools/test_pool_rpc.py | 8 +- .../wallet/cat_wallet/test_cat_wallet.py | 2 +- chia/_tests/wallet/cat_wallet/test_trades.py | 16 +- chia/_tests/wallet/conftest.py | 9 +- .../wallet/dao_wallet/test_dao_wallets.py | 147 ++--- chia/_tests/wallet/did_wallet/test_did.py | 1 - .../wallet/nft_wallet/test_nft_bulk_mint.py | 31 +- chia/_tests/wallet/rpc/test_wallet_rpc.py | 272 +++++----- chia/_tests/wallet/test_clvm_streamable.py | 93 ++++ chia/_tests/wallet/test_signer_protocol.py | 506 +++++++++++++++++- .../_tests/wallet/vc_wallet/test_vc_wallet.py | 80 +-- chia/cmds/chia.py | 2 + chia/cmds/cmds_util.py | 30 +- chia/cmds/coin_funcs.py | 51 +- chia/cmds/coins.py | 18 +- chia/cmds/dao.py | 75 ++- chia/cmds/dao_funcs.py | 230 ++++---- chia/cmds/data.py | 6 + chia/cmds/plotnft.py | 6 + chia/cmds/signer.py | 306 +++++++++++ chia/cmds/wallet.py | 128 +++-- chia/cmds/wallet_funcs.py | 279 ++++++---- chia/data_layer/data_layer.py | 26 +- chia/rpc/util.py | 48 +- chia/rpc/wallet_request_types.py | 252 ++++++++- chia/rpc/wallet_rpc_api.py | 21 +- chia/rpc/wallet_rpc_client.py | 236 ++++---- chia/util/streamable.py | 10 +- chia/wallet/signer_protocol.py | 2 +- chia/wallet/transaction_record.py | 9 +- chia/wallet/util/blind_signer_tl.py | 168 ++++++ chia/wallet/util/clvm_streamable.py | 114 +++- chia/wallet/wallet.py | 4 +- chia/wallet/wallet_state_manager.py | 10 + 44 files changed, 2817 insertions(+), 868 deletions(-) create mode 100644 chia/_tests/cmds/wallet/test_tx_decorators.py create mode 100644 chia/cmds/signer.py create mode 100644 chia/wallet/util/blind_signer_tl.py diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 32e3211cce93..c46ad38792be 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -10,6 +10,7 @@ import 
chia.cmds.wallet_funcs from chia._tests.cmds.testing_classes import create_test_block_record +from chia._tests.cmds.wallet.test_consts import STD_TX, STD_UTX from chia.cmds.chia import cli as chia_cli from chia.cmds.cmds_util import _T_RpcClient, node_config_section_names from chia.consensus.block_record import BlockRecord @@ -18,6 +19,7 @@ from chia.rpc.farmer_rpc_client import FarmerRpcClient from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.rpc.rpc_client import RpcClient +from chia.rpc.wallet_request_types import SendTransactionMultiResponse from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.simulator.simulator_full_node_rpc_client import SimulatorFullNodeRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 @@ -252,26 +254,33 @@ async def send_transaction_multi( tx_config: TXConfig, coins: Optional[List[Coin]] = None, fee: uint64 = uint64(0), - ) -> TransactionRecord: - self.add_to_log("send_transaction_multi", (wallet_id, additions, tx_config, coins, fee)) - return TransactionRecord( - confirmed_at_height=uint32(1), - created_at_time=uint64(1234), - to_puzzle_hash=bytes32([1] * 32), - amount=uint64(12345678), - fee_amount=uint64(1234567), - confirmed=False, - sent=uint32(0), - spend_bundle=SpendBundle([], G2Element()), - additions=[Coin(bytes32([1] * 32), bytes32([2] * 32), uint64(12345678))], - removals=[Coin(bytes32([2] * 32), bytes32([4] * 32), uint64(12345678))], - wallet_id=uint32(1), - sent_to=[("aaaaa", uint8(1), None)], - trade_id=None, - type=uint32(TransactionType.OUTGOING_TX.value), - name=bytes32([2] * 32), - memos=[(bytes32([3] * 32), [bytes([4] * 32)])], - valid_times=ConditionValidTimes(), + push: bool = True, + ) -> SendTransactionMultiResponse: + self.add_to_log("send_transaction_multi", (wallet_id, additions, tx_config, coins, fee, push)) + name = bytes32([2] * 32) + return SendTransactionMultiResponse( + [STD_UTX], + [STD_TX], + TransactionRecord( + confirmed_at_height=uint32(1), + created_at_time=uint64(1234), + to_puzzle_hash=bytes32([1] * 32), + amount=uint64(12345678), + fee_amount=uint64(1234567), + confirmed=False, + sent=uint32(0), + spend_bundle=SpendBundle([], G2Element()), + additions=[Coin(bytes32([1] * 32), bytes32([2] * 32), uint64(12345678))], + removals=[Coin(bytes32([2] * 32), bytes32([4] * 32), uint64(12345678))], + wallet_id=uint32(1), + sent_to=[("aaaaa", uint8(1), None)], + trade_id=None, + type=uint32(TransactionType.OUTGOING_TX.value), + name=name, + memos=[(bytes32([3] * 32), [bytes([4] * 32)])], + valid_times=ConditionValidTimes(), + ), + name, ) diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 3fbc47f27482..9386e68f5e57 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -138,6 +138,7 @@ async def select_coins( Coin(get_bytes32(3), get_bytes32(4), uint64(1234560000)), ], 1000000000, + True, ), ( 1, @@ -155,6 +156,7 @@ async def select_coins( Coin(get_bytes32(5), get_bytes32(6), uint64(300000000000)), ], 1000000000, + True, ), ], } @@ -216,6 +218,7 @@ async def get_coin_records_by_names( DEFAULT_TX_CONFIG, [Coin(get_bytes32(1), get_bytes32(2), uint64(100000000000))], 1000000000, + True, ) ], } diff --git a/chia/_tests/cmds/wallet/test_consts.py b/chia/_tests/cmds/wallet/test_consts.py index 9aab7724f94b..d51da4fc9dc1 100644 --- a/chia/_tests/cmds/wallet/test_consts.py +++ b/chia/_tests/cmds/wallet/test_consts.py @@ -4,8 +4,9 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from 
chia.types.spend_bundle import SpendBundle -from chia.util.ints import uint8, uint32, uint64 +from chia.util.ints import uint32, uint64 from chia.wallet.conditions import ConditionValidTimes +from chia.wallet.signer_protocol import KeyHints, SigningInstructions, TransactionInfo, UnsignedTransaction from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.transaction_type import TransactionType @@ -34,10 +35,13 @@ def get_bytes32(bytes_index: int) -> bytes32: additions=[Coin(get_bytes32(1), get_bytes32(2), uint64(12345678))], removals=[Coin(get_bytes32(2), get_bytes32(4), uint64(12345678))], wallet_id=uint32(1), - sent_to=[("aaaaa", uint8(1), None)], + sent_to=[], trade_id=None, type=uint32(TransactionType.OUTGOING_TX.value), name=get_bytes32(2), memos=[(get_bytes32(3), [bytes([4] * 32)])], valid_times=ConditionValidTimes(), ) + + +STD_UTX = UnsignedTransaction(TransactionInfo([]), SigningInstructions(KeyHints([], []), [])) diff --git a/chia/_tests/cmds/wallet/test_dao.py b/chia/_tests/cmds/wallet/test_dao.py index 83d8ca5e4c28..f61db9fd802d 100644 --- a/chia/_tests/cmds/wallet/test_dao.py +++ b/chia/_tests/cmds/wallet/test_dao.py @@ -9,7 +9,17 @@ from typing_extensions import override from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, run_cli_command_and_assert -from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG +from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, STD_TX, STD_UTX +from chia.rpc.wallet_request_types import ( + CreateNewDAOWalletResponse, + DAOAddFundsToTreasuryResponse, + DAOCloseProposalResponse, + DAOCreateProposalResponse, + DAOExitLockupResponse, + DAOFreeCoinsFromFinishedProposalsResponse, + DAOSendToLockupResponse, + DAOVoteOnProposalResponse, +) from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import encode_puzzle_hash from chia.util.ints import uint8, uint32, uint64 @@ -38,17 +48,22 @@ async def create_new_dao_wallet( name: Optional[str] = None, fee: uint64 = uint64(0), fee_for_cat: uint64 = uint64(0), - ) -> Dict[str, Union[str, int, bytes32]]: + push: bool = True, + ) -> CreateNewDAOWalletResponse: if not treasury_id: treasury_id = bytes32(token_bytes(32)) - return { - "success": True, - "type": "DAO", - "wallet_id": 2, - "treasury_id": treasury_id, - "cat_wallet_id": 3, - "dao_cat_wallet_id": 4, - } + return CreateNewDAOWalletResponse.from_json_dict( + { + "success": True, + "transactions": [STD_TX.to_json_dict()], + "unsigned_transactions": [STD_UTX.to_json_dict()], + "type": WalletType.DAO, + "wallet_id": 2, + "treasury_id": treasury_id, + "cat_wallet_id": 3, + "dao_cat_wallet_id": 4, + } + ) inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -126,8 +141,9 @@ async def dao_add_funds_to_treasury( tx_config: TXConfig, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, bool]]: - return {"success": True, "tx_id": bytes32(b"1" * 32).hex()} + push: bool = True, + ) -> DAOAddFundsToTreasuryResponse: + return DAOAddFundsToTreasuryResponse([STD_UTX], [STD_TX], STD_TX.name, STD_TX) async def dao_get_rules( self, @@ -264,8 +280,9 @@ async def dao_vote_on_proposal( tx_config: TXConfig, is_yes_vote: bool, fee: uint64 = uint64(0), - ) -> Dict[str, Union[str, bool]]: - return {"success": True, "tx_id": bytes32(b"1" * 32).hex()} + push: bool = True, + ) -> DAOVoteOnProposalResponse: + return DAOVoteOnProposalResponse([STD_UTX], [STD_TX], 
STD_TX.name, STD_TX) async def dao_close_proposal( self, @@ -275,8 +292,9 @@ async def dao_close_proposal( fee: uint64 = uint64(0), self_destruct: bool = False, reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, bool]]: - return {"success": True, "tx_id": bytes32(b"1" * 32).hex()} + push: bool = True, + ) -> DAOCloseProposalResponse: + return DAOCloseProposalResponse([STD_UTX], [STD_TX], STD_TX.name, STD_TX) async def dao_create_proposal( self, @@ -292,8 +310,9 @@ async def dao_create_proposal( new_dao_rules: Optional[Dict[str, uint64]] = None, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, bool]]: - return {"success": True, "proposal_id": "0xCAFEF00D"} + push: bool = True, + ) -> DAOCreateProposalResponse: + return DAOCreateProposalResponse([STD_UTX], [STD_TX], bytes32([0] * 32), STD_TX.name, STD_TX) async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, int]]]: return [{"id": 1, "type": 0}, {"id": 2, "type": 14}] @@ -405,7 +424,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: "-m 0.1", "--reuse", ] - proposal_asserts = ["Successfully created proposal", "Proposal ID: 0xCAFEF00D"] + proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32([0] * 32).hex()}"] run_cli_command_and_assert(capsys, root_dir, spend_args, proposal_asserts) bad_spend_args = [ @@ -423,7 +442,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: "-m 0.1", "--reuse", ] - proposal_asserts = ["Successfully created proposal", "Proposal ID: 0xCAFEF00D"] + proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32([0] * 32).hex()}"] with pytest.raises(ValueError) as e_info: run_cli_command_and_assert(capsys, root_dir, bad_spend_args, proposal_asserts) assert e_info.value.args[0] == "Must include a json specification or an address / amount pair." 
@@ -475,8 +494,9 @@ async def dao_send_to_lockup( tx_config: TXConfig, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, int]]: - return {"success": True, "tx_id": bytes32(b"x" * 32).hex()} + push: bool = True, + ) -> DAOSendToLockupResponse: + return DAOSendToLockupResponse([STD_UTX], [STD_TX], STD_TX.name, [STD_TX]) async def dao_free_coins_from_finished_proposals( self, @@ -484,8 +504,9 @@ async def dao_free_coins_from_finished_proposals( tx_config: TXConfig, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, int]]: - return {"success": True, "tx_id": bytes32(b"x" * 32).hex()} + push: bool = True, + ) -> DAOFreeCoinsFromFinishedProposalsResponse: + return DAOFreeCoinsFromFinishedProposalsResponse([STD_UTX], [STD_TX], STD_TX.name, STD_TX) async def dao_exit_lockup( self, @@ -494,8 +515,9 @@ async def dao_exit_lockup( coins: Optional[List[Dict[str, Union[str, int]]]] = None, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, - ) -> Dict[str, Union[str, int]]: - return {"success": True, "tx_id": bytes32(b"x" * 32).hex()} + push: bool = True, + ) -> DAOExitLockupResponse: + return DAOExitLockupResponse([STD_UTX], [STD_TX], STD_TX.name, STD_TX) @override async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: diff --git a/chia/_tests/cmds/wallet/test_did.py b/chia/_tests/cmds/wallet/test_did.py index d868b18f1ae2..8b7b3287c0c1 100644 --- a/chia/_tests/cmds/wallet/test_did.py +++ b/chia/_tests/cmds/wallet/test_did.py @@ -3,11 +3,17 @@ from pathlib import Path from typing import Dict, List, Optional, Tuple, Union +from chia_rs import G2Element + from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert -from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, get_bytes32 +from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 +from chia.rpc.wallet_request_types import DIDMessageSpendResponse, DIDTransferDIDResponse, DIDUpdateMetadataResponse from chia.types.blockchain_format.sized_bytes import bytes48 from chia.types.signing_mode import SigningMode +from chia.types.spend_bundle import SpendBundle from chia.util.bech32m import encode_puzzle_hash +from chia.util.config import load_config +from chia.util.ints import uint32 from chia.wallet.conditions import Condition, CreateCoinAnnouncement, CreatePuzzleAnnouncement from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, TXConfig @@ -27,10 +33,11 @@ async def create_new_did_wallet( name: Optional[str] = "DID Wallet", backup_ids: Optional[List[str]] = None, required_num: int = 0, + push: bool = True, ) -> Dict[str, Union[str, int]]: if backup_ids is None: backup_ids = [] - self.add_to_log("create_new_did_wallet", (amount, tx_config, fee, name, backup_ids, required_num)) + self.add_to_log("create_new_did_wallet", (amount, tx_config, fee, name, backup_ids, required_num, push)) return {"wallet_id": 3, "my_did": "did:chia:testdid123456"} inst_rpc_client = DidCreateRpcClient() # pylint: disable=no-value-for-parameter @@ -43,7 +50,7 @@ async def create_new_did_wallet( ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { - "create_new_did_wallet": [(3, DEFAULT_TX_CONFIG, 100000000000, "test", [], 0)], + "create_new_did_wallet": [(3, DEFAULT_TX_CONFIG, 100000000000, "test", [], 0, True)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -175,9 +182,10 @@ async def 
update_did_metadata( wallet_id: int, metadata: Dict[str, object], tx_config: TXConfig, - ) -> Dict[str, object]: - self.add_to_log("update_did_metadata", (wallet_id, metadata, tx_config)) - return {"wallet_id": wallet_id, "spend_bundle": "spend bundle here"} + push: bool = True, + ) -> DIDUpdateMetadataResponse: + self.add_to_log("update_did_metadata", (wallet_id, metadata, tx_config, push)) + return DIDUpdateMetadataResponse([STD_UTX], [STD_TX], SpendBundle([], G2Element()), uint32(wallet_id)) inst_rpc_client = DidUpdateMetadataRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -194,10 +202,11 @@ async def update_did_metadata( "--reuse", ] # these are various things that should be in the output - assert_list = [f"Successfully updated DID wallet ID: {w_id}, Spend Bundle: spend bundle here"] + assert STD_TX.spend_bundle is not None + assert_list = [f"Successfully updated DID wallet ID: {w_id}, Spend Bundle: {STD_TX.spend_bundle.to_json_dict()}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { - "update_did_metadata": [(w_id, {"test": True}, DEFAULT_TX_CONFIG.override(reuse_puzhash=True))], + "update_did_metadata": [(w_id, {"test": True}, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), True)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -246,10 +255,10 @@ def test_did_message_spend(capsys: object, get_test_cli_clients: Tuple[TestRpcCl # set RPC Client class DidMessageSpendRpcClient(TestWalletRpcClient): async def did_message_spend( - self, wallet_id: int, tx_config: TXConfig, extra_conditions: Tuple[Condition, ...] - ) -> Dict[str, object]: - self.add_to_log("did_message_spend", (wallet_id, tx_config, extra_conditions)) - return {"spend_bundle": "spend bundle here"} + self, wallet_id: int, tx_config: TXConfig, extra_conditions: Tuple[Condition, ...], push: bool + ) -> DIDMessageSpendResponse: + self.add_to_log("did_message_spend", (wallet_id, tx_config, extra_conditions, True)) + return DIDMessageSpendResponse([STD_UTX], [STD_TX], SpendBundle([], G2Element())) inst_rpc_client = DidMessageSpendRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -268,7 +277,8 @@ async def did_message_spend( ",".join([announcement.hex() for announcement in puz_announcements]), ] # these are various things that should be in the output - assert_list = ["Message Spend Bundle: spend bundle here"] + assert STD_TX.spend_bundle is not None + assert_list = [f"Message Spend Bundle: {STD_TX.spend_bundle.to_json_dict()}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "did_message_spend": [ @@ -279,6 +289,7 @@ async def did_message_spend( *(CreateCoinAnnouncement(ann) for ann in c_announcements), *(CreatePuzzleAnnouncement(ann) for ann in puz_announcements), ), + True, ) ], } @@ -297,9 +308,15 @@ async def did_transfer_did( fee: int, with_recovery: bool, tx_config: TXConfig, - ) -> Dict[str, object]: - self.add_to_log("did_transfer_did", (wallet_id, address, fee, with_recovery, tx_config)) - return {"transaction_id": get_bytes32(2).hex(), "transaction": "transaction here"} + push: bool, + ) -> DIDTransferDIDResponse: + self.add_to_log("did_transfer_did", (wallet_id, address, fee, with_recovery, tx_config, push)) + return DIDTransferDIDResponse( + [STD_UTX], + [STD_TX], + STD_TX, + STD_TX.name, + ) inst_rpc_client = DidTransferRpcClient() # pylint: disable=no-value-for-parameter 
test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -317,13 +334,19 @@ async def did_transfer_did( t_address, ] # these are various things that should be in the output + config = load_config( + root_dir, + "config.yaml", + ) assert_list = [ f"Successfully transferred DID to {t_address}", f"Transaction ID: {get_bytes32(2).hex()}", - "Transaction: transaction here", + f"Transaction: {STD_TX.to_json_dict_convenience(config)}", ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { - "did_transfer_did": [(w_id, t_address, 500000000000, True, DEFAULT_TX_CONFIG.override(reuse_puzhash=True))], + "did_transfer_did": [ + (w_id, t_address, 500000000000, True, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), True) + ], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index 1482dee3cbf3..c40627601751 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -3,10 +3,19 @@ from pathlib import Path from typing import Any, List, Optional, Tuple +from chia_rs import G2Element + from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert -from chia._tests.cmds.wallet.test_consts import FINGERPRINT, FINGERPRINT_ARG, get_bytes32 +from chia._tests.cmds.wallet.test_consts import FINGERPRINT, FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 +from chia.rpc.wallet_request_types import ( + NFTAddURIResponse, + NFTMintNFTResponse, + NFTSetNFTDIDResponse, + NFTTransferNFTResponse, +) from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.signing_mode import SigningMode +from chia.types.spend_bundle import SpendBundle from chia.util.bech32m import encode_puzzle_hash from chia.util.ints import uint8, uint16, uint32, uint64 from chia.wallet.nft_wallet.nft_info import NFTInfo @@ -87,7 +96,8 @@ async def mint_nft( royalty_percentage: int = 0, did_id: Optional[str] = None, reuse_puzhash: Optional[bool] = None, - ) -> dict[str, object]: + push: bool = True, + ) -> NFTMintNFTResponse: self.add_to_log( "mint_nft", ( @@ -106,9 +116,16 @@ async def mint_nft( royalty_percentage, did_id, reuse_puzhash, + push, ), ) - return {"spend_bundle": "spend bundle here"} + return NFTMintNFTResponse( + [STD_UTX], + [STD_TX], + uint32(wallet_id), + SpendBundle([], G2Element()), + bytes32([0] * 32).hex(), + ) inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter target_addr = encode_puzzle_hash(get_bytes32(2), "xch") @@ -129,7 +146,7 @@ async def mint_nft( "--reuse", ] # these are various things that should be in the output - assert_list = ["NFT minted Successfully with spend bundle: spend bundle here"] + assert_list = [f"NFT minted Successfully with spend bundle: {STD_TX.spend_bundle}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "get_nft_wallet_did": [(4,)], @@ -156,6 +173,7 @@ async def mint_nft( 500000000000, 0, "0xcee228b8638c67cb66a55085be99fa3b457ae5b56915896f581990f600b2c652", + True, ) ], } @@ -175,9 +193,10 @@ async def add_uri_to_nft( uri: str, fee: int, tx_config: TXConfig, - ) -> dict[str, object]: - self.add_to_log("add_uri_to_nft", (wallet_id, nft_coin_id, key, uri, fee, tx_config)) - return {"spend_bundle": "spend bundle here"} + push: bool, + ) -> NFTAddURIResponse: + self.add_to_log("add_uri_to_nft", (wallet_id, nft_coin_id, key, uri, fee, tx_config, push)) + return 
NFTAddURIResponse([STD_UTX], [STD_TX], uint32(wallet_id), SpendBundle([], G2Element())) inst_rpc_client = NFTAddUriRpcClient() # pylint: disable=no-value-for-parameter nft_coin_id = get_bytes32(2).hex() @@ -196,7 +215,8 @@ async def add_uri_to_nft( "--reuse", ] # these are various things that should be in the output - assert_list = ["URI added successfully with spend bundle: spend bundle here"] + assert STD_TX.spend_bundle is not None + assert_list = [f"URI added successfully with spend bundle: {STD_TX.spend_bundle.to_json_dict()}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "add_uri_to_nft": [ @@ -207,6 +227,7 @@ async def add_uri_to_nft( "https://example.com/nft", 500000000000, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), + True, ) ], } @@ -225,9 +246,15 @@ async def transfer_nft( target_address: str, fee: int, tx_config: TXConfig, - ) -> dict[str, object]: - self.add_to_log("transfer_nft", (wallet_id, nft_coin_id, target_address, fee, tx_config)) - return {"spend_bundle": "spend bundle here"} + push: bool, + ) -> NFTTransferNFTResponse: + self.add_to_log("transfer_nft", (wallet_id, nft_coin_id, target_address, fee, tx_config, push)) + return NFTTransferNFTResponse( + [STD_UTX], + [STD_TX], + uint32(wallet_id), + SpendBundle([], G2Element()), + ) inst_rpc_client = NFTTransferRpcClient() # pylint: disable=no-value-for-parameter nft_coin_id = get_bytes32(2).hex() @@ -247,11 +274,12 @@ async def transfer_nft( "--reuse", ] # these are various things that should be in the output - assert_list = ["NFT transferred successfully with spend bundle: spend bundle here"] + assert STD_TX.spend_bundle is not None + assert_list = ["NFT transferred successfully", f"spend bundle: {STD_TX.spend_bundle.to_json_dict()}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "transfer_nft": [ - (4, nft_coin_id, target_address, 500000000000, DEFAULT_TX_CONFIG.override(reuse_puzhash=True)) + (4, nft_coin_id, target_address, 500000000000, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), True) ], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -331,9 +359,14 @@ async def set_nft_did( nft_coin_id: str, fee: int, tx_config: TXConfig, - ) -> dict[str, object]: + ) -> NFTSetNFTDIDResponse: self.add_to_log("set_nft_did", (wallet_id, did_id, nft_coin_id, fee, tx_config)) - return {"spend_bundle": "this is a spend bundle"} + return NFTSetNFTDIDResponse( + [STD_UTX], + [STD_TX], + uint32(wallet_id), + SpendBundle([], G2Element()), + ) inst_rpc_client = NFTSetDidRpcClient() # pylint: disable=no-value-for-parameter nft_coin_id = get_bytes32(2).hex() @@ -353,7 +386,8 @@ async def set_nft_did( "--reuse", ] # these are various things that should be in the output - assert_list = ["Transaction to set DID on NFT has been initiated with: this is a spend bundle"] + assert STD_TX.spend_bundle is not None + assert_list = [f"Transaction to set DID on NFT has been initiated with: {STD_TX.spend_bundle.to_json_dict()}"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "set_nft_did": [(4, did_id, nft_coin_id, 500000000000, DEFAULT_TX_CONFIG.override(reuse_puzhash=True))], diff --git a/chia/_tests/cmds/wallet/test_notifications.py b/chia/_tests/cmds/wallet/test_notifications.py index 4e161357559a..d0080e5a5161 100644 --- a/chia/_tests/cmds/wallet/test_notifications.py +++ b/chia/_tests/cmds/wallet/test_notifications.py @@ -21,9 +21,9 @@ def 
test_notifications_send(capsys: object, get_test_cli_clients: Tuple[TestRpcC # set RPC Client class NotificationsSendRpcClient(TestWalletRpcClient): async def send_notification( - self, target: bytes32, msg: bytes, amount: uint64, fee: uint64 = uint64(0) + self, target: bytes32, msg: bytes, amount: uint64, fee: uint64 = uint64(0), push: bool = True ) -> TransactionRecord: - self.add_to_log("send_notification", (target, msg, amount, fee)) + self.add_to_log("send_notification", (target, msg, amount, fee, push)) class FakeTransactionRecord: def __init__(self, name: str) -> None: @@ -53,7 +53,7 @@ def __init__(self, name: str) -> None: ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { - "send_notification": [(target_ph, bytes(msg, "utf8"), 20000000, 1000000000)], + "send_notification": [(target_ph, bytes(msg, "utf8"), 20000000, 1000000000, True)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) diff --git a/chia/_tests/cmds/wallet/test_tx_decorators.py b/chia/_tests/cmds/wallet/test_tx_decorators.py new file mode 100644 index 000000000000..8a79e9507828 --- /dev/null +++ b/chia/_tests/cmds/wallet/test_tx_decorators.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from typing import Any, List + +import click +from click.testing import CliRunner + +from chia._tests.cmds.wallet.test_consts import STD_TX +from chia.cmds.cmds_util import TransactionBundle, tx_out_cmd +from chia.wallet.transaction_record import TransactionRecord + + +def test_tx_out_cmd() -> None: + @click.command() + @tx_out_cmd + def test_cmd(**kwargs: Any) -> List[TransactionRecord]: + with open("./temp.push", "w") as file: + file.write(str(kwargs["push"])) + return [STD_TX, STD_TX] + + runner: CliRunner = CliRunner() + with runner.isolated_filesystem(): + runner.invoke(test_cmd, ["--transaction-file", "./temp.transaction"]) + with open("./temp.transaction", "rb") as file: + assert TransactionBundle.from_bytes(file.read()) == TransactionBundle([STD_TX, STD_TX]) + with open("./temp.push") as file2: + assert file2.read() == "True" diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index 85374addd26c..6f96b2f5bc1e 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -6,12 +6,15 @@ from chia_rs import Coin from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert -from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, STD_TX, get_bytes32 +from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 +from chia.rpc.wallet_request_types import VCMintResponse, VCRevokeResponse, VCSpendResponse from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import encode_puzzle_hash from chia.util.ints import uint32, uint64 +from chia.wallet.lineage_proof import LineageProof from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, TXConfig +from chia.wallet.vc_wallet.vc_drivers import VCLineageProof, VerifiedCredential from chia.wallet.vc_wallet.vc_store import VCRecord # VC Commands @@ -28,19 +31,26 @@ async def vc_mint( tx_config: TXConfig, target_address: Optional[bytes32] = None, fee: uint64 = uint64(0), - ) -> Tuple[VCRecord, List[TransactionRecord]]: - self.add_to_log("vc_mint", (did_id, tx_config, target_address, fee)) - - class FakeVC: - def __init__(self) -> None: - self.launcher_id = get_bytes32(3) 
- - def __getattr__(self, item: str) -> Any: - if item == "vc": - return self - - txs = [STD_TX] - return cast(VCRecord, FakeVC()), txs + push: bool = True, + ) -> VCMintResponse: + self.add_to_log("vc_mint", (did_id, tx_config, target_address, fee, push)) + + return VCMintResponse( + [STD_UTX], + [STD_TX], + VCRecord( + VerifiedCredential( + STD_TX.removals[0], + LineageProof(None, None, None), + VCLineageProof(None, None, None, None), + bytes32([3] * 32), + bytes32([0] * 32), + bytes32([1] * 32), + None, + ), + uint32(0), + ), + ) inst_rpc_client = VcsMintRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -55,7 +65,7 @@ def __getattr__(self, item: str) -> Any: f"Transaction {get_bytes32(2).hex()}", ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) - expected_calls: logType = {"vc_mint": [(did_bytes, DEFAULT_TX_CONFIG, target_bytes, 500000000000)]} + expected_calls: logType = {"vc_mint": [(did_bytes, DEFAULT_TX_CONFIG, target_bytes, 500000000000, True)]} test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -108,9 +118,12 @@ async def vc_spend( new_proof_hash: Optional[bytes32] = None, provider_inner_puzhash: Optional[bytes32] = None, fee: uint64 = uint64(0), - ) -> List[TransactionRecord]: - self.add_to_log("vc_spend", (vc_id, tx_config, new_puzhash, new_proof_hash, provider_inner_puzhash, fee)) - return [STD_TX] + push: bool = True, + ) -> VCSpendResponse: + self.add_to_log( + "vc_spend", (vc_id, tx_config, new_puzhash, new_proof_hash, provider_inner_puzhash, fee, push) + ) + return VCSpendResponse([STD_UTX], [STD_TX]) inst_rpc_client = VcsUpdateProofsRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -136,7 +149,15 @@ async def vc_spend( run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "vc_spend": [ - (vc_bytes, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), target_ph, new_proof, None, uint64(500000000000)) + ( + vc_bytes, + DEFAULT_TX_CONFIG.override(reuse_puzhash=True), + target_ph, + new_proof, + None, + uint64(500000000000), + True, + ) ] } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -210,9 +231,10 @@ async def vc_revoke( vc_parent_id: bytes32, tx_config: TXConfig, fee: uint64 = uint64(0), - ) -> List[TransactionRecord]: - self.add_to_log("vc_revoke", (vc_parent_id, tx_config, fee)) - return [STD_TX] + push: bool = True, + ) -> VCRevokeResponse: + self.add_to_log("vc_revoke", (vc_parent_id, tx_config, fee, push)) + return VCRevokeResponse([STD_UTX], [STD_TX]) inst_rpc_client = VcsRevokeRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -233,8 +255,8 @@ async def vc_revoke( expected_calls: logType = { "vc_get": [(vc_id,)], "vc_revoke": [ - (parent_id, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), uint64(500000000000)), - (parent_id, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), uint64(500000000000)), + (parent_id, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), uint64(500000000000), True), + (parent_id, DEFAULT_TX_CONFIG.override(reuse_puzhash=True), uint64(500000000000), True), ], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -251,6 +273,7 @@ async def crcat_approve_pending( min_amount_to_claim: uint64, tx_config: TXConfig, fee: uint64 = uint64(0), + push: bool = True, ) -> List[TransactionRecord]: self.add_to_log( "crcat_approve_pending", @@ -259,6 +282,7 @@ async def 
crcat_approve_pending( min_amount_to_claim, tx_config, fee, + push, ), ) return [STD_TX] @@ -296,6 +320,7 @@ async def crcat_approve_pending( reuse_puzhash=True, ), uint64(500000000000), + True, ) ], "get_wallets": [(None,)], diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index eaf58f957394..e47d1e130770 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -3,11 +3,12 @@ import datetime import os from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Dict, List, Optional, Tuple, Union import importlib_resources import pytest from chia_rs import Coin, G2Element +from click.testing import CliRunner from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert from chia._tests.cmds.wallet.test_consts import ( @@ -15,11 +16,20 @@ FINGERPRINT, FINGERPRINT_ARG, STD_TX, + STD_UTX, WALLET_ID, WALLET_ID_ARG, bytes32_hexstr, get_bytes32, ) +from chia.cmds.cmds_util import TransactionBundle +from chia.rpc.wallet_request_types import ( + CancelOfferResponse, + CATSpendResponse, + CreateOfferForIDsResponse, + SendTransactionResponse, + TakeOfferResponse, +) from chia.server.outbound_message import NodeType from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -304,7 +314,8 @@ async def send_transaction( fee: uint64 = uint64(0), memos: Optional[List[str]] = None, puzzle_decorator_override: Optional[List[Dict[str, Union[str, int, bool]]]] = None, - ) -> TransactionRecord: + push: bool = True, + ) -> SendTransactionResponse: self.add_to_log( "send_transaction", ( @@ -315,8 +326,10 @@ async def send_transaction( fee, memos, puzzle_decorator_override, + push, ), ) + name = get_bytes32(2) tx_rec = TransactionRecord( confirmed_at_height=uint32(1), created_at_time=uint64(1234), @@ -332,11 +345,11 @@ async def send_transaction( sent_to=[("aaaaa", uint8(1), None)], trade_id=None, type=uint32(TransactionType.OUTGOING_CLAWBACK.value), - name=get_bytes32(2), + name=name, memos=[(get_bytes32(3), [bytes([4] * 32)])], valid_times=ConditionValidTimes(), ) - return tx_rec + return SendTransactionResponse([STD_UTX], [STD_TX], tx_rec, name) async def cat_spend( self, @@ -349,7 +362,8 @@ async def cat_spend( additions: Optional[List[Dict[str, Any]]] = None, removals: Optional[List[Coin]] = None, cat_discrepancy: Optional[Tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) - ) -> TransactionRecord: + push: bool = True, + ) -> CATSpendResponse: self.add_to_log( "cat_spend", ( @@ -362,9 +376,10 @@ async def cat_spend( additions, removals, cat_discrepancy, + push, ), ) - return STD_TX + return CATSpendResponse([STD_UTX], [STD_TX], STD_TX, STD_TX.name) inst_rpc_client = SendWalletRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -393,8 +408,19 @@ async def cat_spend( "Transaction submitted to nodes: [{'peer_id': 'aaaaa'", f"-f 789101 -tx 0x{get_bytes32(2).hex()}", ] - run_cli_command_and_assert(capsys, root_dir, command_args + [FINGERPRINT_ARG], assert_list) - run_cli_command_and_assert(capsys, root_dir, command_args + [CAT_FINGERPRINT_ARG], cat_assert_list) + with CliRunner().isolated_filesystem(): + run_cli_command_and_assert( + capsys, root_dir, command_args + [FINGERPRINT_ARG] + ["--transaction-file=temp"], assert_list + ) + run_cli_command_and_assert( + capsys, 
root_dir, command_args + [CAT_FINGERPRINT_ARG] + ["--transaction-file=temp2"], cat_assert_list + ) + + with open("temp", "rb") as file: + assert TransactionBundle.from_bytes(file.read()) == TransactionBundle([STD_TX]) + with open("temp2", "rb") as file: + assert TransactionBundle.from_bytes(file.read()) == TransactionBundle([STD_TX]) + # these are various things that should be in the output expected_calls: logType = { "get_wallets": [(None,), (None,)], @@ -413,6 +439,7 @@ async def cat_spend( 500000000000, ["0x6262626262626262626262626262626262626262626262626262626262626262"], [{"decorator": "CLAWBACK", "clawback_timelock": 60}], + True, ) ], "cat_spend": [ @@ -432,6 +459,7 @@ async def cat_spend( None, None, None, + True, ) ], "get_transaction": [(get_bytes32(2),), (get_bytes32(2),)], @@ -480,10 +508,21 @@ async def spend_clawback_coins( coin_ids: List[bytes32], fee: int = 0, force: bool = False, + push: bool = True, ) -> Dict[str, Any]: - self.add_to_log("spend_clawback_coins", (coin_ids, fee, force)) + self.add_to_log("spend_clawback_coins", (coin_ids, fee, force, push)) tx_hex_list = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] - return {"transaction_ids": tx_hex_list} + return { + "transaction_ids": tx_hex_list, + "transactions": [ + STD_TX.to_json_dict_convenience( + { + "selected_network": "mainnet", + "network_overrides": {"config": {"mainnet": {"address_prefix": "xch"}}}, + } + ) + ], + } inst_rpc_client = ClawbackWalletRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -501,7 +540,7 @@ async def spend_clawback_coins( run_cli_command_and_assert(capsys, root_dir, command_args, ["transaction_ids", str(r_tx_ids_hex)]) # these are various things that should be in the output expected_calls: logType = { - "spend_clawback_coins": [(tx_ids, 500000000000, False)], + "spend_clawback_coins": [(tx_ids, 500000000000, False, True)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -695,17 +734,13 @@ async def create_offer_for_ids( solver: Optional[Dict[str, Any]] = None, fee: uint64 = uint64(0), validate_only: bool = False, - ) -> Tuple[Optional[Offer], TradeRecord]: + ) -> CreateOfferForIDsResponse: self.add_to_log( "create_offer_for_ids", (offer_dict, tx_config, driver_dict, solver, fee, validate_only), ) - class FakeOffer: - def to_bech32(self) -> str: - return "offer string" - - created_offer = cast(Offer, FakeOffer()) + created_offer = Offer({}, SpendBundle([], G2Element()), {}) trade_offer: TradeRecord = TradeRecord( confirmed_at_index=uint32(0), accepted_at_time=None, @@ -721,7 +756,7 @@ def to_bech32(self) -> str: valid_times=ConditionValidTimes(), ) - return created_offer, trade_offer + return CreateOfferForIDsResponse([STD_UTX], [STD_TX], created_offer, trade_offer) inst_rpc_client = MakeOfferRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -928,21 +963,27 @@ async def take_offer( tx_config: TXConfig, solver: Optional[Dict[str, Any]] = None, fee: uint64 = uint64(0), - ) -> TradeRecord: - self.add_to_log("take_offer", (offer, tx_config, solver, fee)) - return TradeRecord( - confirmed_at_index=uint32(0), - accepted_at_time=uint64(123456789), - created_at_time=uint64(12345678), - is_my_offer=False, - sent=uint32(1), - sent_to=[("aaaaa", uint8(1), None)], - offer=bytes(offer), - taken_offer=None, - coins_of_interest=offer.get_involved_coins(), - trade_id=offer.name(), - status=uint32(TradeStatus.PENDING_ACCEPT.value), - 
valid_times=ConditionValidTimes(), + push: bool = True, + ) -> TakeOfferResponse: + self.add_to_log("take_offer", (offer, tx_config, solver, fee, push)) + return TakeOfferResponse( + [STD_UTX], + [STD_TX], + offer, + TradeRecord( + confirmed_at_index=uint32(0), + accepted_at_time=uint64(123456789), + created_at_time=uint64(12345678), + is_my_offer=False, + sent=uint32(1), + sent_to=[("aaaaa", uint8(1), None)], + offer=bytes(offer), + taken_offer=None, + coins_of_interest=offer.get_involved_coins(), + trade_id=offer.name(), + status=uint32(TradeStatus.PENDING_ACCEPT.value), + valid_times=ConditionValidTimes(), + ), ) inst_rpc_client = TakeOfferRpcClient() # pylint: disable=no-value-for-parameter @@ -970,7 +1011,7 @@ async def take_offer( (cat2,), (bytes32.from_hexstr("accce8e1c71b56624f2ecaeff5af57eac41365080449904d0717bd333c04806d"),), ], - "take_offer": [(Offer.from_bech32(test_offer_file_bech32), DEFAULT_TX_CONFIG, None, 500000000000)], + "take_offer": [(Offer.from_bech32(test_offer_file_bech32), DEFAULT_TX_CONFIG, None, 500000000000, True)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -999,10 +1040,15 @@ async def get_offer(self, trade_id: bytes32, file_contents: bool = False) -> Tra ) async def cancel_offer( - self, trade_id: bytes32, tx_config: TXConfig, fee: uint64 = uint64(0), secure: bool = True - ) -> None: - self.add_to_log("cancel_offer", (trade_id, tx_config, fee, secure)) - return None + self, + trade_id: bytes32, + tx_config: TXConfig, + fee: uint64 = uint64(0), + secure: bool = True, + push: bool = True, + ) -> CancelOfferResponse: + self.add_to_log("cancel_offer", (trade_id, tx_config, fee, secure, push)) + return CancelOfferResponse([STD_UTX], [STD_TX]) inst_rpc_client = CancelOfferRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -1022,7 +1068,7 @@ async def cancel_offer( run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "get_offer": [(test_offer_id_bytes, True)], - "cancel_offer": [(test_offer_id_bytes, DEFAULT_TX_CONFIG, 500000000000, True)], + "cancel_offer": [(test_offer_id_bytes, DEFAULT_TX_CONFIG, 500000000000, True, True)], "cat_asset_id_to_name": [ (cat1,), (cat2,), diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index 3be906e6893f..30d6d992234f 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -460,9 +460,11 @@ async def test_absorb_self( assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 - tr: TransactionRecord = await client.send_transaction( - 1, uint64(100), encode_puzzle_hash(status.p2_singleton_puzzle_hash, "txch"), DEFAULT_TX_CONFIG - ) + tr: TransactionRecord = ( + await client.send_transaction( + 1, uint64(100), encode_puzzle_hash(status.p2_singleton_puzzle_hash, "txch"), DEFAULT_TX_CONFIG + ) + ).transaction await full_node_api.wait_transaction_records_entered_mempool(records=[tr]) await full_node_api.farm_blocks_to_puzzlehash(count=2, farm_to=our_ph, guarantee_transaction_blocks=True) diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index d9432adacc99..c81506dc3a2c 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -1014,7 +1014,7 @@ async def test_cat_change_detection( cat_amount_0 = uint64(100) cat_amount_1 = uint64(5) - tx = await client_0.send_transaction(1, 
cat_amount_0, addr, DEFAULT_TX_CONFIG) + tx = (await client_0.send_transaction(1, cat_amount_0, addr, DEFAULT_TX_CONFIG)).transaction spend_bundle = tx.spend_bundle assert spend_bundle is not None diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index e152761f4612..d54c649c4f45 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -223,12 +223,16 @@ async def test_cat_trades( ) # Mint some VCs that can spend the CR-CATs - vc_record_maker, _ = await client_maker.vc_mint( - did_id_maker, wallet_environments.tx_config, target_address=await wallet_maker.get_new_puzzlehash() - ) - vc_record_taker, _ = await client_taker.vc_mint( - did_id_taker, wallet_environments.tx_config, target_address=await wallet_taker.get_new_puzzlehash() - ) + vc_record_maker = ( + await client_maker.vc_mint( + did_id_maker, wallet_environments.tx_config, target_address=await wallet_maker.get_new_puzzlehash() + ) + ).vc_record + vc_record_taker = ( + await client_taker.vc_mint( + did_id_taker, wallet_environments.tx_config, target_address=await wallet_taker.get_new_puzzlehash() + ) + ).vc_record await wallet_environments.process_pending_states( [ # Balance checking for this scenario is covered in tests/wallet/vc_wallet/test_vc_lifecycle diff --git a/chia/_tests/wallet/conftest.py b/chia/_tests/wallet/conftest.py index 5fc63153b620..24ac9b054391 100644 --- a/chia/_tests/wallet/conftest.py +++ b/chia/_tests/wallet/conftest.py @@ -165,10 +165,11 @@ async def wallet_environments( wallet_states: List[WalletState] = [] for service, blocks_needed in zip(wallet_services, request.param["blocks_needed"]): - await full_node[0]._api.farm_blocks_to_wallet( - count=blocks_needed, wallet=service._node.wallet_state_manager.main_wallet - ) - await full_node[0]._api.wait_for_wallet_synced(wallet_node=service._node, timeout=20) + if blocks_needed > 0: + await full_node[0]._api.farm_blocks_to_wallet( + count=blocks_needed, wallet=service._node.wallet_state_manager.main_wallet + ) + await full_node[0]._api.wait_for_wallet_synced(wallet_node=service._node, timeout=20) wallet_states.append( WalletState( Balance( diff --git a/chia/_tests/wallet/dao_wallet/test_dao_wallets.py b/chia/_tests/wallet/dao_wallet/test_dao_wallets.py index e8b7664a36f1..965787a5d2d9 100644 --- a/chia/_tests/wallet/dao_wallet/test_dao_wallets.py +++ b/chia/_tests/wallet/dao_wallet/test_dao_wallets.py @@ -1312,7 +1312,6 @@ async def test_dao_rpc_api(self_hostname: str, two_wallet_nodes: Any, trusted: A "fee": fee, } ) - assert create_proposal["success"] txs = [TransactionRecord.from_json_dict(create_proposal["tx"])] await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -1402,7 +1401,6 @@ async def test_dao_rpc_api(self_hostname: str, two_wallet_nodes: Any, trusted: A "fee": fee, } ) - assert mint_proposal["success"] txs = [TransactionRecord.from_json_dict(mint_proposal["tx"])] await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -1501,7 +1499,6 @@ async def test_dao_rpc_api(self_hostname: str, two_wallet_nodes: Any, trusted: A "fee": fee, } ) - assert update_proposal["success"] txs = [TransactionRecord.from_json_dict(update_proposal["tx"])] await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await 
full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -1672,7 +1669,7 @@ async def test_dao_rpc_client( fee = uint64(10000) # create new dao - dao_wallet_dict_0 = await client_0.create_new_dao_wallet( + dao_wallet_res_0 = await client_0.create_new_dao_wallet( mode="new", tx_config=DEFAULT_TX_CONFIG, dao_rules=dao_rules.to_json_dict(), @@ -1680,10 +1677,8 @@ async def test_dao_rpc_client( filter_amount=filter_amount, name="DAO WALLET 0", ) - assert dao_wallet_dict_0["success"] - dao_id_0 = dao_wallet_dict_0["wallet_id"] - treasury_id_hex = dao_wallet_dict_0["treasury_id"] - cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]] + dao_id_0 = dao_wallet_res_0.wallet_id + cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_res_0.cat_wallet_id] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) @@ -1694,8 +1689,8 @@ async def test_dao_rpc_client( # Create a new standard cat for treasury funds new_cat_amt = uint64(100000) - free_coins_res = await client_0.create_new_cat_and_wallet(new_cat_amt, test=True) - new_cat_wallet_id = free_coins_res["wallet_id"] + new_cat_res = await client_0.create_new_cat_and_wallet(new_cat_amt, test=True) + new_cat_wallet_id = new_cat_res["wallet_id"] new_cat_wallet = wallet_node_0.wallet_state_manager.wallets[new_cat_wallet_id] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() @@ -1704,25 +1699,20 @@ async def test_dao_rpc_client( await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) # join dao - dao_wallet_dict_1 = await client_1.create_new_dao_wallet( + dao_wallet_res_1 = await client_1.create_new_dao_wallet( mode="existing", tx_config=DEFAULT_TX_CONFIG, - treasury_id=treasury_id_hex, + treasury_id=dao_wallet_res_0.treasury_id, filter_amount=filter_amount, name="DAO WALLET 1", ) - assert dao_wallet_dict_1["success"] - dao_id_1 = dao_wallet_dict_1["wallet_id"] - cat_wallet_1 = wallet_node_1.wallet_state_manager.wallets[dao_wallet_dict_1["cat_wallet_id"]] + dao_id_1 = dao_wallet_res_1.wallet_id + cat_wallet_1 = wallet_node_1.wallet_state_manager.wallets[dao_wallet_res_1.cat_wallet_id] # fund treasury xch_funds = uint64(10000000000) - funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, xch_funds, DEFAULT_TX_CONFIG) - cat_funding_tx = await client_0.dao_add_funds_to_treasury( - dao_id_0, new_cat_wallet_id, new_cat_amt, DEFAULT_TX_CONFIG - ) - assert funding_tx["success"] - assert cat_funding_tx["success"] + await client_0.dao_add_funds_to_treasury(dao_id_0, 1, xch_funds, DEFAULT_TX_CONFIG) + await client_0.dao_add_funds_to_treasury(dao_id_0, new_cat_wallet_id, new_cat_amt, DEFAULT_TX_CONFIG) txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -1748,7 +1738,7 @@ async def test_dao_rpc_client( # send cats to wallet 1 await client_0.cat_spend( - wallet_id=dao_wallet_dict_0["cat_wallet_id"], + wallet_id=dao_wallet_res_0.cat_wallet_id, tx_config=DEFAULT_TX_CONFIG, amount=cat_amt, inner_address=encode_puzzle_hash(ph_1, "xch"), @@ -1764,10 +1754,8 @@ async def test_dao_rpc_client( await time_out_assert(20, cat_wallet_1.get_confirmed_balance, cat_amt) # send cats to lockup - lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, 
cat_amt, DEFAULT_TX_CONFIG) - lockup_1 = await client_1.dao_send_to_lockup(dao_id_1, cat_amt, DEFAULT_TX_CONFIG) - assert lockup_0["success"] - assert lockup_1["success"] + await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG) + await client_1.dao_send_to_lockup(dao_id_1, cat_amt, DEFAULT_TX_CONFIG) txs_0 = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() txs_1 = await wallet_1.wallet_state_manager.tx_store.get_all_unconfirmed() @@ -1781,7 +1769,7 @@ async def test_dao_rpc_client( {"puzzle_hash": ph_0.hex(), "amount": 1000}, {"puzzle_hash": ph_0.hex(), "amount": 10000, "asset_id": new_cat_asset_id.hex()}, ] - proposal = await client_0.dao_create_proposal( + await client_0.dao_create_proposal( wallet_id=dao_id_0, proposal_type="spend", tx_config=DEFAULT_TX_CONFIG, @@ -1789,7 +1777,6 @@ async def test_dao_rpc_client( vote_amount=cat_amt, fee=fee, ) - assert proposal["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -1801,7 +1788,7 @@ async def test_dao_rpc_client( proposal_id_hex = props["proposals"][0]["proposal_id"] # create an update proposal - update_proposal = await client_1.dao_create_proposal( + await client_1.dao_create_proposal( wallet_id=dao_id_1, proposal_type="update", tx_config=DEFAULT_TX_CONFIG, @@ -1809,7 +1796,6 @@ async def test_dao_rpc_client( new_dao_rules={"proposal_timelock": uint64(10)}, fee=fee, ) - assert update_proposal["success"] txs = await wallet_1.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_1, timeout=60) @@ -1817,7 +1803,7 @@ async def test_dao_rpc_client( # create a mint proposal mint_addr = await client_1.get_next_address(wallet_id=wallet_1.id(), new_address=False) - mint_proposal = await client_1.dao_create_proposal( + await client_1.dao_create_proposal( wallet_id=dao_id_1, proposal_type="mint", tx_config=DEFAULT_TX_CONFIG, @@ -1826,14 +1812,13 @@ async def test_dao_rpc_client( cat_target_address=mint_addr, fee=fee, ) - assert mint_proposal["success"] txs = await wallet_1.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_1, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) # vote spend - vote = await client_1.dao_vote_on_proposal( + await client_1.dao_vote_on_proposal( wallet_id=dao_id_1, proposal_id=proposal_id_hex, vote_amount=cat_amt, @@ -1841,7 +1826,6 @@ async def test_dao_rpc_client( is_yes_vote=True, fee=fee, ) - assert vote["success"] txs = await wallet_1.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_1, timeout=60) @@ -1885,7 +1869,7 @@ async def test_dao_rpc_client( close = await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee ) - tx = TransactionRecord.from_json_dict(close["tx"]) + tx = close.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await 
full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -1920,8 +1904,7 @@ async def test_dao_rpc_client( close = await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee ) - assert close["success"] - tx = TransactionRecord.from_json_dict(close["tx"]) + tx = close.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -1934,7 +1917,7 @@ async def test_dao_rpc_client( await rpc_state( 20, client_1.get_wallet_balance, - [dao_wallet_dict_1["cat_wallet_id"]], + [dao_wallet_res_1.cat_wallet_id], lambda x: x["confirmed_wallet_balance"], 100, ) @@ -1947,8 +1930,7 @@ async def test_dao_rpc_client( close = await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee ) - assert close["success"] - tx = TransactionRecord.from_json_dict(close["tx"]) + tx = close.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -1967,18 +1949,16 @@ async def test_dao_rpc_client( free_coins_res = await client_0.dao_free_coins_from_finished_proposals( wallet_id=dao_id_0, tx_config=DEFAULT_TX_CONFIG ) - assert free_coins_res["success"] - free_coins_tx = TransactionRecord.from_json_dict(free_coins_res["tx"]) + free_coins_tx = free_coins_res.tx await full_node_api.wait_transaction_records_entered_mempool(records=[free_coins_tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) - bal = await client_0.get_wallet_balance(dao_wallet_dict_0["dao_cat_wallet_id"]) + bal = await client_0.get_wallet_balance(dao_wallet_res_0.dao_cat_wallet_id) assert bal["confirmed_wallet_balance"] == cat_amt exit = await client_0.dao_exit_lockup(dao_id_0, tx_config=DEFAULT_TX_CONFIG) - assert exit["success"] - exit_tx = TransactionRecord.from_json_dict(exit["tx"]) + exit_tx = exit.tx await full_node_api.wait_transaction_records_entered_mempool(records=[exit_tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -1986,14 +1966,14 @@ async def test_dao_rpc_client( await rpc_state( 20, client_0.get_wallet_balance, - [dao_wallet_dict_0["cat_wallet_id"]], + [dao_wallet_res_0.cat_wallet_id], lambda x: x["confirmed_wallet_balance"], cat_amt, ) # coverage tests for filter amount and get treasury id treasury_id_resp = await client_0.dao_get_treasury_id(wallet_id=dao_id_0) - assert treasury_id_resp["treasury_id"] == treasury_id_hex + assert treasury_id_resp["treasury_id"] == "0x" + dao_wallet_res_0.treasury_id.hex() filter_amount_resp = await client_0.dao_adjust_filter_level(wallet_id=dao_id_0, filter_level=30) assert filter_amount_resp["dao_info"]["filter_below_vote_amount"] == 30 @@ -2077,7 +2057,7 @@ async def test_dao_complex_spends( filter_amount = uint64(1) # create new dao - 
dao_wallet_dict_0 = await client_0.create_new_dao_wallet( + dao_wallet_res_0 = await client_0.create_new_dao_wallet( mode="new", tx_config=DEFAULT_TX_CONFIG, dao_rules=dao_rules.to_json_dict(), @@ -2085,10 +2065,9 @@ async def test_dao_complex_spends( filter_amount=filter_amount, name="DAO WALLET 0", ) - assert dao_wallet_dict_0["success"] - dao_id_0 = dao_wallet_dict_0["wallet_id"] - treasury_id_hex = dao_wallet_dict_0["treasury_id"] - cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]] + dao_id_0 = dao_wallet_res_0.wallet_id + treasury_id = dao_wallet_res_0.treasury_id + cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_res_0.cat_wallet_id] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) @@ -2119,29 +2098,25 @@ async def test_dao_complex_spends( await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) # join dao - dao_wallet_dict_1 = await client_1.create_new_dao_wallet( + dao_wallet_res_1 = await client_1.create_new_dao_wallet( mode="existing", tx_config=DEFAULT_TX_CONFIG, - treasury_id=treasury_id_hex, + treasury_id=treasury_id, filter_amount=filter_amount, name="DAO WALLET 1", ) - assert dao_wallet_dict_1["success"] - dao_id_1 = dao_wallet_dict_1["wallet_id"] + dao_id_1 = dao_wallet_res_1.wallet_id # fund treasury so there are multiple coins for each asset xch_funds = uint64(10000000000) for _ in range(4): - funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, uint64(xch_funds / 4), DEFAULT_TX_CONFIG) - cat_funding_tx = await client_0.dao_add_funds_to_treasury( + await client_0.dao_add_funds_to_treasury(dao_id_0, 1, uint64(xch_funds / 4), DEFAULT_TX_CONFIG) + await client_0.dao_add_funds_to_treasury( dao_id_0, new_cat_wallet_id, uint64(new_cat_amt / 4), DEFAULT_TX_CONFIG ) - cat_funding_tx_2 = await client_0.dao_add_funds_to_treasury( + await client_0.dao_add_funds_to_treasury( dao_id_0, new_cat_wallet_id_2, uint64(new_cat_amt / 4), DEFAULT_TX_CONFIG ) - assert funding_tx["success"] - assert cat_funding_tx["success"] - assert cat_funding_tx_2["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2172,8 +2147,7 @@ async def test_dao_complex_spends( await client_1.create_wallet_for_existing_cat(new_cat_asset_id_2) # send cats to lockup - lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG) - assert lockup_0["success"] + await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG) txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) @@ -2187,14 +2161,13 @@ async def test_dao_complex_spends( {"puzzle_hash": ph_0.hex(), "amount": xch_funds / 4}, {"puzzle_hash": ph_1.hex(), "amount": xch_funds / 4}, ] - proposal = await client_0.dao_create_proposal( + await client_0.dao_create_proposal( wallet_id=dao_id_0, proposal_type="spend", tx_config=DEFAULT_TX_CONFIG, additions=additions, vote_amount=cat_amt, ) - assert proposal["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await 
full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2203,10 +2176,9 @@ async def test_dao_complex_spends( props = await client_1.dao_get_proposals(dao_id_1) proposal_id_hex = props["proposals"][-1]["proposal_id"] - close = await client_0.dao_close_proposal( + await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False ) - assert close["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2237,14 +2209,13 @@ async def test_dao_complex_spends( {"puzzle_hash": ph_0.hex(), "amount": cat_spend_amt, "asset_id": new_cat_asset_id.hex()}, {"puzzle_hash": ph_0.hex(), "amount": cat_spend_amt, "asset_id": new_cat_asset_id_2.hex()}, ] - proposal = await client_0.dao_create_proposal( + await client_0.dao_create_proposal( wallet_id=dao_id_0, proposal_type="spend", tx_config=DEFAULT_TX_CONFIG, additions=additions, vote_amount=cat_amt, ) - assert proposal["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2253,10 +2224,9 @@ async def test_dao_complex_spends( props = await client_1.dao_get_proposals(dao_id_1) proposal_id_hex = props["proposals"][-1]["proposal_id"] - close = await client_0.dao_close_proposal( + await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False ) - assert close["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2303,14 +2273,13 @@ async def test_dao_complex_spends( {"puzzle_hash": ph_0.hex(), "amount": xch_funds / 4}, {"puzzle_hash": ph_1.hex(), "amount": xch_funds / 4}, ] - proposal = await client_0.dao_create_proposal( + await client_0.dao_create_proposal( wallet_id=dao_id_0, proposal_type="spend", tx_config=DEFAULT_TX_CONFIG, additions=additions, vote_amount=cat_amt, ) - assert proposal["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2319,13 +2288,12 @@ async def test_dao_complex_spends( props = await client_0.dao_get_proposals(dao_id_0) proposal_id_hex = props["proposals"][-1]["proposal_id"] - close = await client_0.dao_close_proposal( + await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, ) - assert close["success"] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) @@ -2680,7 +2648,7 @@ async def test_dao_cat_exits( # create new dao await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) - dao_wallet_dict_0 = await client_0.create_new_dao_wallet( + dao_wallet_res_0 = await client_0.create_new_dao_wallet( mode="new", 
tx_config=DEFAULT_TX_CONFIG, dao_rules=dao_rules.to_json_dict(), @@ -2688,11 +2656,9 @@ async def test_dao_cat_exits( filter_amount=filter_amount, name="DAO WALLET 0", ) - assert dao_wallet_dict_0["success"] - dao_id_0 = dao_wallet_dict_0["wallet_id"] - # treasury_id_hex = dao_wallet_dict_0["treasury_id"] - cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]] - dao_cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["dao_cat_wallet_id"]] + dao_id_0 = dao_wallet_res_0.wallet_id + cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_res_0.cat_wallet_id] + dao_cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_res_0.dao_cat_wallet_id] txs = await wallet_0.wallet_state_manager.tx_store.get_all_unconfirmed() for tx in txs: await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) @@ -2705,8 +2671,7 @@ async def test_dao_cat_exits( # fund treasury xch_funds = uint64(10000000000) funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, xch_funds, DEFAULT_TX_CONFIG) - assert funding_tx["success"] - tx = TransactionRecord.from_json_dict(funding_tx["tx"]) + tx = funding_tx.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -2715,8 +2680,7 @@ async def test_dao_cat_exits( # send cats to lockup lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG) - assert lockup_0["success"] - txs = [TransactionRecord.from_json_dict(x) for x in lockup_0["txs"]] + txs = lockup_0.txs await full_node_api.wait_transaction_records_entered_mempool(records=txs, timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -2736,8 +2700,7 @@ async def test_dao_cat_exits( vote_amount=cat_amt, fee=fee, ) - assert proposal["success"] - tx = TransactionRecord.from_json_dict(proposal["tx"]) + tx = proposal.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -2763,8 +2726,7 @@ async def test_dao_cat_exits( close = await client_0.dao_close_proposal( wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee ) - assert close["success"] - tx = TransactionRecord.from_json_dict(close["tx"]) + tx = close.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -2774,8 +2736,7 @@ async def test_dao_cat_exits( # free locked cats from finished proposal res = await client_0.dao_free_coins_from_finished_proposals(wallet_id=dao_id_0, tx_config=DEFAULT_TX_CONFIG) - assert res["success"] - tx = TransactionRecord.from_json_dict(res["tx"]) + tx = res.tx await full_node_api.wait_transaction_records_entered_mempool(records=[tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await 
full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) @@ -2784,7 +2745,7 @@ async def test_dao_cat_exits( assert dao_cat_wallet_0.dao_cat_info.locked_coins[0].active_votes == [] exit = await client_0.dao_exit_lockup(dao_id_0, DEFAULT_TX_CONFIG) - exit_tx = TransactionRecord.from_json_dict(exit["tx"]) + exit_tx = exit.tx await full_node_api.wait_transaction_records_entered_mempool(records=[exit_tx], timeout=60) await full_node_api.process_all_wallet_transactions(wallet_0, timeout=60) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_0, wallet_node_1], timeout=30) diff --git a/chia/_tests/wallet/did_wallet/test_did.py b/chia/_tests/wallet/did_wallet/test_did.py index 33d65afe38aa..f6f589facc1c 100644 --- a/chia/_tests/wallet/did_wallet/test_did.py +++ b/chia/_tests/wallet/did_wallet/test_did.py @@ -998,7 +998,6 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted): response = await api_0.did_message_spend( {"wallet_id": did_wallet_1.wallet_id, "coin_announcements": ["0abc"], "puzzle_announcements": ["0def"]} ) - assert "spend_bundle" in response spend = response["spend_bundle"].coin_spends[0] conditions = conditions_dict_for_solution( spend.puzzle_reveal, spend.solution, wallet.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM diff --git a/chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py b/chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py index fa61d359793e..c848243f78be 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py @@ -1,7 +1,7 @@ from __future__ import annotations import random -from typing import Any, Dict +from typing import Any import pytest @@ -282,7 +282,7 @@ async def test_nft_mint_from_did_rpc( nft_ids = set() for i in range(0, n, chunk): await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_maker, timeout=20) - resp: Dict[str, Any] = await client.nft_mint_bulk( + resp = await client.nft_mint_bulk( wallet_id=nft_wallet_maker["wallet_id"], metadata_list=metadata_list[i : i + chunk], target_list=target_list[i : i + chunk], @@ -298,19 +298,18 @@ async def test_nft_mint_from_did_rpc( fee=fee, tx_config=DEFAULT_TX_CONFIG, ) - assert resp["success"] - sb: SpendBundle = SpendBundle.from_json_dict(resp["spend_bundle"]) + sb: SpendBundle = resp.spend_bundle did_lineage_parent = [cn for cn in sb.removals() if cn.name() == did_coin.name()][0].parent_coin_info.hex() did_coin = [cn for cn in sb.additions() if (cn.parent_coin_info == did_coin.name()) and (cn.amount == 1)][0] spends.append(sb) xch_adds = [c for c in sb.additions() if c.puzzle_hash == funding_coin.puzzle_hash] assert len(xch_adds) == 1 next_coin = xch_adds[0] - for nft_id in resp["nft_id_list"]: + for nft_id in resp.nft_id_list: nft_ids.add(decode_puzzle_hash(nft_id)) for sb in spends: - resp = await client_node.push_tx(sb) - assert resp["success"] + push_resp = await client_node.push_tx(sb) + assert push_resp["success"] await full_node_api.process_spend_bundles([sb]) await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token)) @@ -476,7 +475,7 @@ async def test_nft_mint_from_did_rpc_no_royalties( for i in range(0, n, chunk): await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_maker, timeout=20) - resp: Dict[str, Any] = await client.nft_mint_bulk( + resp = await client.nft_mint_bulk( wallet_id=nft_wallet_maker["wallet_id"], metadata_list=metadata_list[i : i + chunk], target_list=target_list[i : i + chunk], @@ -491,8 
+490,7 @@ async def test_nft_mint_from_did_rpc_no_royalties( mint_from_did=True, tx_config=DEFAULT_TX_CONFIG, ) - assert resp["success"] - sb: SpendBundle = SpendBundle.from_json_dict(resp["spend_bundle"]) + sb: SpendBundle = resp.spend_bundle did_lineage_parent = [cn for cn in sb.removals() if cn.name() == did_coin.name()][0].parent_coin_info.hex() did_coin = [cn for cn in sb.additions() if (cn.parent_coin_info == did_coin.name()) and (cn.amount == 1)][0] spends.append(sb) @@ -501,8 +499,8 @@ async def test_nft_mint_from_did_rpc_no_royalties( next_coin = xch_adds[0] for sb in spends: - resp = await client_node.push_tx(sb) - assert resp["success"] + push_resp = await client_node.push_tx(sb) + assert push_resp["success"] await full_node_api.process_spend_bundles([sb]) await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token)) @@ -883,7 +881,7 @@ async def test_nft_mint_from_xch_rpc( for i in range(0, n, chunk): await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_maker, timeout=20) - resp: Dict[str, Any] = await client.nft_mint_bulk( + resp = await client.nft_mint_bulk( wallet_id=nft_wallet_maker["wallet_id"], metadata_list=metadata_list[i : i + chunk], target_list=target_list[i : i + chunk], @@ -897,16 +895,15 @@ async def test_nft_mint_from_xch_rpc( fee=fee, tx_config=DEFAULT_TX_CONFIG, ) - assert resp["success"] - sb: SpendBundle = SpendBundle.from_json_dict(resp["spend_bundle"]) + sb: SpendBundle = resp.spend_bundle spends.append(sb) xch_adds = [c for c in sb.additions() if c.puzzle_hash == funding_coin.puzzle_hash] assert len(xch_adds) == 1 next_coin = xch_adds[0] for sb in spends: - resp = await client_node.push_tx(sb) - assert resp["success"] + push_resp = await client_node.push_tx(sb) + assert push_resp["success"] await full_node_api.process_spend_bundles([sb]) await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token)) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index a4d1cc1a03b1..5974541e6959 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -83,6 +83,7 @@ from chia.wallet.transaction_sorting import SortKey from chia.wallet.uncurried_puzzle import uncurry_puzzle from chia.wallet.util.address_type import AddressType +from chia.wallet.util.blind_signer_tl import BLIND_SIGNER_TRANSLATION from chia.wallet.util.clvm_streamable import byte_deserialize_clvm_streamable from chia.wallet.util.compute_memos import compute_memos from chia.wallet.util.query_filter import AmountFilter, HashFilter, TransactionTypeFilter @@ -311,19 +312,21 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment # Tests sending a basic transaction extra_conditions = (Remark(Program.to(("test", None))),) non_existent_coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) - tx_no_push = await client.send_transaction( - 1, - tx_amount, - addr, - memos=["this is a basic tx"], - tx_config=DEFAULT_TX_CONFIG.override( - excluded_coin_amounts=[uint64(250000000000)], - excluded_coin_ids=[non_existent_coin.name()], - reuse_puzhash=True, - ), - extra_conditions=extra_conditions, - push=False, - ) + tx_no_push = ( + await client.send_transaction( + 1, + tx_amount, + addr, + memos=["this is a basic tx"], + tx_config=DEFAULT_TX_CONFIG.override( + excluded_coin_amounts=[uint64(250000000000)], + excluded_coin_ids=[non_existent_coin.name()], + reuse_puzhash=True, + ), + extra_conditions=extra_conditions, + push=False, + ) + 
).transaction response = await client.fetch( "send_transaction", { @@ -338,13 +341,16 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment "exclude_coins": [non_existent_coin.to_json_dict()], "reuse_puzhash": True, "CHIP-0029": True, + "translation": "CHIP-0028", "push": True, }, ) assert response["success"] tx = TransactionRecord.from_json_dict_convenience(response["transactions"][0]) [ - byte_deserialize_clvm_streamable(bytes.fromhex(utx), UnsignedTransaction) + byte_deserialize_clvm_streamable( + bytes.fromhex(utx), UnsignedTransaction, translation_layer=BLIND_SIGNER_TRANSLATION + ) for utx in response["unsigned_transactions"] ] assert tx == dataclasses.replace(tx_no_push, created_at_time=tx.created_at_time) @@ -380,11 +386,13 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen outputs = await create_tx_outputs(wallet, [(1234321, None)]) - tx = await client.create_signed_transaction( - outputs, - tx_config=DEFAULT_TX_CONFIG, - fee=uint64(100), - ) + tx = ( + await client.create_signed_transactions( + outputs, + tx_config=DEFAULT_TX_CONFIG, + fee=uint64(100), + ) + ).signed_tx await client.push_transactions([tx]) resp = await client.fetch("push_transactions", {"transactions": [tx.to_json_dict_convenience(wallet_node.config)]}) @@ -547,17 +555,19 @@ async def test_create_signed_transaction( ) assert len(selected_coin) == 1 - tx = await wallet_1_rpc.create_signed_transaction( - outputs, - coins=selected_coin, - fee=amount_fee, - wallet_id=wallet_id, - # shouldn't actually block it - tx_config=DEFAULT_TX_CONFIG.override( - excluded_coin_amounts=[uint64(selected_coin[0].amount)] if selected_coin is not None else [], - ), - push=True, - ) + tx = ( + await wallet_1_rpc.create_signed_transactions( + outputs, + coins=selected_coin, + fee=amount_fee, + wallet_id=wallet_id, + # shouldn't actually block it + tx_config=DEFAULT_TX_CONFIG.override( + excluded_coin_amounts=[uint64(selected_coin[0].amount)] if selected_coin is not None else [], + ), + push=True, + ) + ).signed_tx change_expected = not selected_coin or selected_coin[0].amount - amount_total > 0 assert_tx_amounts(tx, outputs, amount_fee=amount_fee, change_expected=change_expected, is_cat=is_cat) @@ -624,9 +634,11 @@ async def test_create_signed_transaction_with_coin_announcement(wallet_rpc_envir ), ] outputs = await create_tx_outputs(wallet_2, [(signed_tx_amount, None)]) - tx_res: TransactionRecord = await client.create_signed_transaction( - outputs, tx_config=DEFAULT_TX_CONFIG, extra_conditions=(*tx_coin_announcements,) - ) + tx_res: TransactionRecord = ( + await client.create_signed_transactions( + outputs, tx_config=DEFAULT_TX_CONFIG, extra_conditions=(*tx_coin_announcements,) + ) + ).signed_tx assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=True) await assert_push_tx_error(client_node, tx_res) @@ -654,9 +666,11 @@ async def test_create_signed_transaction_with_puzzle_announcement(wallet_rpc_env ), ] outputs = await create_tx_outputs(wallet_2, [(signed_tx_amount, None)]) - tx_res = await client.create_signed_transaction( - outputs, tx_config=DEFAULT_TX_CONFIG, extra_conditions=(*tx_puzzle_announcements,) - ) + tx_res = ( + await client.create_signed_transactions( + outputs, tx_config=DEFAULT_TX_CONFIG, extra_conditions=(*tx_puzzle_announcements,) + ) + ).signed_tx assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=True) await assert_push_tx_error(client_node, tx_res) @@ -677,12 +691,14 @@ async def 
it_does_not_include_the_excluded_coins() -> None: assert len(selected_coins) == 1 outputs = await create_tx_outputs(wallet_1, [(uint64(250000000000), None)]) - tx = await wallet_1_rpc.create_signed_transaction( - outputs, - DEFAULT_TX_CONFIG.override( - excluded_coin_ids=[c.name() for c in selected_coins], - ), - ) + tx = ( + await wallet_1_rpc.create_signed_transactions( + outputs, + DEFAULT_TX_CONFIG.override( + excluded_coin_ids=[c.name() for c in selected_coins], + ), + ) + ).signed_tx assert len(tx.removals) == 1 assert tx.removals[0] != selected_coins[0] @@ -697,7 +713,7 @@ async def it_throws_an_error_when_all_spendable_coins_are_excluded() -> None: outputs = await create_tx_outputs(wallet_1, [(uint64(1750000000000), None)]) with pytest.raises(ValueError): - await wallet_1_rpc.create_signed_transaction( + await wallet_1_rpc.create_signed_transactions( outputs, DEFAULT_TX_CONFIG.override( excluded_coin_ids=[c.name() for c in selected_coins], @@ -726,26 +742,30 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron wallet_1_puzhash = await wallet_1.get_new_puzzlehash() await full_node_api.wait_for_wallet_synced(wallet_node=wallet_1_node, timeout=20) wallet_2_puzhash = await wallet_2.get_new_puzzlehash() - tx = await wallet_1_rpc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=encode_puzzle_hash(wallet_2_puzhash, "txch"), - tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], - ) + tx = ( + await wallet_1_rpc.send_transaction( + wallet_id=1, + amount=uint64(500), + address=encode_puzzle_hash(wallet_2_puzhash, "txch"), + tx_config=DEFAULT_TX_CONFIG, + fee=uint64(0), + puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], + ) + ).transaction clawback_coin_id_1 = tx.additions[0].name() assert tx.spend_bundle is not None await farm_transaction(full_node_api, wallet_1_node, tx.spend_bundle) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_2_node, timeout=20) - tx = await wallet_2_rpc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=encode_puzzle_hash(wallet_1_puzhash, "txch"), - tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], - ) + tx = ( + await wallet_2_rpc.send_transaction( + wallet_id=1, + amount=uint64(500), + address=encode_puzzle_hash(wallet_1_puzhash, "txch"), + tx_config=DEFAULT_TX_CONFIG, + fee=uint64(0), + puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], + ) + ).transaction assert tx.spend_bundle is not None clawback_coin_id_2 = tx.additions[0].name() await farm_transaction(full_node_api, wallet_2_node, tx.spend_bundle) @@ -788,8 +808,10 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron resp = await wallet_1_rpc.spend_clawback_coins([fake_coin.name()], 100) assert resp["transaction_ids"] == [] # Test coin puzzle hash doesn't match the puzzle - tx = (await wallet_1.wallet_state_manager.tx_store.get_farming_rewards())[0] - await wallet_1.wallet_state_manager.tx_store.add_transaction_record(dataclasses.replace(tx, name=fake_coin.name())) + farmed_tx = (await wallet_1.wallet_state_manager.tx_store.get_farming_rewards())[0] + await wallet_1.wallet_state_manager.tx_store.add_transaction_record( + dataclasses.replace(farmed_tx, name=fake_coin.name()) + ) await wallet_1_node.wallet_state_manager.coin_store.add_coin_record( dataclasses.replace(coin_record, coin=fake_coin) ) @@ 
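The two `create_signed_transactions` sub-tests above pin down the contract of `excluded_coin_ids`: an excluded coin never appears in the removals, and if exclusions leave too little spendable value the call raises `ValueError`. A toy greedy selector (not the wallet's real coin-selection code) that satisfies the same contract:

```python
from dataclasses import dataclass
from typing import List, Set


@dataclass(frozen=True)
class Coin:
    coin_id: str
    amount: int


def select_coins(coins: List[Coin], target: int, excluded_coin_ids: Set[str]) -> List[Coin]:
    selected: List[Coin] = []
    total = 0
    for coin in sorted(coins, key=lambda c: c.amount, reverse=True):
        if coin.coin_id in excluded_coin_ids:
            continue  # excluded coins are skipped even if they would fit best
        selected.append(coin)
        total += coin.amount
        if total >= target:
            return selected
    raise ValueError("not enough spendable value once exclusions are applied")


coins = [Coin("a", 250), Coin("b", 1000), Coin("c", 500)]
assert [c.coin_id for c in select_coins(coins, 600, excluded_coin_ids={"b"})] == ["c", "a"]

try:
    select_coins(coins, 600, excluded_coin_ids={"a", "b", "c"})
except ValueError:
    pass  # everything excluded -> nothing spendable, mirroring the second sub-test
```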
-801,10 +823,10 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron assert resp["success"] assert len(resp["transaction_ids"]) == 2 for _tx in resp["transactions"]: - tx = TransactionRecord.from_json_dict_convenience(_tx) - if tx.spend_bundle is not None: + clawback_tx = TransactionRecord.from_json_dict_convenience(_tx) + if clawback_tx.spend_bundle is not None: await time_out_assert_not_none( - 10, full_node_api.full_node.mempool_manager.get_spendbundle, tx.spend_bundle.name() + 10, full_node_api.full_node.mempool_manager.get_spendbundle, clawback_tx.spend_bundle.name() ) await farm_transaction_block(full_node_api, wallet_2_node) await time_out_assert(20, get_confirmed_balance, generated_funds + 300, wallet_2_rpc, 1) @@ -832,13 +854,15 @@ async def test_send_transaction_multi(wallet_rpc_environment: WalletRpcTestEnvir amount_outputs = sum(output["amount"] for output in outputs) amount_fee = uint64(amount_outputs + 1) - send_tx_res: TransactionRecord = await client.send_transaction_multi( - 1, - outputs, - DEFAULT_TX_CONFIG, - coins=removals, - fee=amount_fee, - ) + send_tx_res: TransactionRecord = ( + await client.send_transaction_multi( + 1, + outputs, + DEFAULT_TX_CONFIG, + coins=removals, + fee=amount_fee, + ) + ).transaction spend_bundle = send_tx_res.spend_bundle assert spend_bundle is not None assert send_tx_res is not None @@ -1070,8 +1094,8 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): ["the cat memo"], ) tx_res = await client.cat_spend(cat_0_id, DEFAULT_TX_CONFIG, uint64(4), addr_1, uint64(0), ["the cat memo"]) - assert tx_res.wallet_id == cat_0_id - spend_bundle = tx_res.spend_bundle + assert tx_res.transaction.wallet_id == cat_0_id + spend_bundle = tx_res.transaction.spend_bundle assert spend_bundle is not None assert uncurry_puzzle(spend_bundle.coin_spends[0].puzzle_reveal.to_program()).mod == CAT_MOD await farm_transaction(full_node_api, wallet_node, spend_bundle) @@ -1080,8 +1104,8 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): # Test CAT spend with a fee tx_res = await client.cat_spend(cat_0_id, DEFAULT_TX_CONFIG, uint64(1), addr_1, uint64(5_000_000), ["the cat memo"]) - assert tx_res.wallet_id == cat_0_id - spend_bundle = tx_res.spend_bundle + assert tx_res.transaction.wallet_id == cat_0_id + spend_bundle = tx_res.transaction.spend_bundle assert spend_bundle is not None assert uncurry_puzzle(spend_bundle.coin_spends[0].puzzle_reveal.to_program()).mod == CAT_MOD await farm_transaction(full_node_api, wallet_node, spend_bundle) @@ -1093,10 +1117,10 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): tx_res = await client.cat_spend( cat_0_id, DEFAULT_TX_CONFIG, uint64(1), addr_1, uint64(5_000_000), ["the cat memo"], removals=removals ) - assert tx_res.wallet_id == cat_0_id - spend_bundle = tx_res.spend_bundle + assert tx_res.transaction.wallet_id == cat_0_id + spend_bundle = tx_res.transaction.spend_bundle assert spend_bundle is not None - assert removals[0] in tx_res.removals + assert removals[0] in tx_res.transaction.removals assert uncurry_puzzle(spend_bundle.coin_spends[0].puzzle_reveal.to_program()).mod == CAT_MOD await farm_transaction(full_node_api, wallet_node, spend_bundle) @@ -1144,9 +1168,11 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) await wallet_2_rpc.create_wallet_for_existing_cat(cat_asset_id) wallet_2_address = await wallet_2_rpc.get_next_address(cat_wallet_id, False) adds = 
[{"puzzle_hash": decode_puzzle_hash(wallet_2_address), "amount": uint64(4), "memos": ["the cat memo"]}] - tx_res = await wallet_1_rpc.send_transaction_multi( - cat_wallet_id, additions=adds, tx_config=DEFAULT_TX_CONFIG, fee=uint64(0) - ) + tx_res = ( + await wallet_1_rpc.send_transaction_multi( + cat_wallet_id, additions=adds, tx_config=DEFAULT_TX_CONFIG, fee=uint64(0) + ) + ).transaction spend_bundle = tx_res.spend_bundle assert spend_bundle is not None await farm_transaction(full_node_api, wallet_node, spend_bundle) @@ -1159,22 +1185,21 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) with pytest.raises(ValueError): await wallet_1_rpc.get_coin_records_by_names([a.name() for a in spend_bundle.additions() if a.amount == 4]) # Create an offer of 5 chia for one CAT - offer, trade_record = await wallet_1_rpc.create_offer_for_ids( + await wallet_1_rpc.create_offer_for_ids( {uint32(1): -5, cat_asset_id.hex(): 1}, DEFAULT_TX_CONFIG, validate_only=True ) all_offers = await wallet_1_rpc.get_all_offers() assert len(all_offers) == 0 - assert offer is None driver_dict: Dict[str, Any] = {cat_asset_id.hex(): {"type": "CAT", "tail": "0x" + cat_asset_id.hex()}} - offer, trade_record = await wallet_1_rpc.create_offer_for_ids( + create_res = await wallet_1_rpc.create_offer_for_ids( {uint32(1): -5, cat_asset_id.hex(): 1}, DEFAULT_TX_CONFIG, driver_dict=driver_dict, fee=uint64(1), ) - assert offer is not None + offer = create_res.offer id, summary = await wallet_1_rpc.get_offer_summary(offer) assert id == offer.name() @@ -1204,7 +1229,7 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) assert TradeStatus(all_offers[0].status) == TradeStatus.PENDING_ACCEPT assert all_offers[0].offer == bytes(offer) - trade_record = await wallet_2_rpc.take_offer(offer, DEFAULT_TX_CONFIG, fee=uint64(1)) + trade_record = (await wallet_2_rpc.take_offer(offer, DEFAULT_TX_CONFIG, fee=uint64(1))).trade_record assert TradeStatus(trade_record.status) == TradeStatus.PENDING_CONFIRM await wallet_1_rpc.cancel_offer(offer.name(), DEFAULT_TX_CONFIG, secure=False) @@ -1218,11 +1243,12 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) trade_record = await wallet_1_rpc.get_offer(offer.name()) assert TradeStatus(trade_record.status) == TradeStatus.PENDING_CANCEL - new_offer, new_trade_record = await wallet_1_rpc.create_offer_for_ids( + create_res = await wallet_1_rpc.create_offer_for_ids( {uint32(1): -5, cat_wallet_id: 1}, DEFAULT_TX_CONFIG, fee=uint64(1) ) all_offers = await wallet_1_rpc.get_all_offers() assert len(all_offers) == 2 + new_trade_record = create_res.trade_record await farm_transaction_block(full_node_api, wallet_node) @@ -1365,7 +1391,7 @@ async def test_get_coin_records_by_names(wallet_rpc_environment: WalletRpcTestEn await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Spend half of it back to the same wallet get some spent coins in the wallet - tx = await client.send_transaction(1, uint64(generated_funds / 2), address, DEFAULT_TX_CONFIG) + tx = (await client.send_transaction(1, uint64(generated_funds / 2), address, DEFAULT_TX_CONFIG)).transaction assert tx.spend_bundle is not None await time_out_assert(20, tx_in_mempool, True, client, tx.name) await farm_transaction(full_node_api, wallet_node, tx.spend_bundle) @@ -1455,8 +1481,8 @@ async def test_did_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) await 
farm_transaction_block(full_node_api, wallet_1_node) # Update recovery list - res = await wallet_1_rpc.update_did_recovery_list(did_wallet_id_0, [did_id_0], 1, DEFAULT_TX_CONFIG) - assert res["success"] + update_res = await wallet_1_rpc.update_did_recovery_list(did_wallet_id_0, [did_id_0], 1, DEFAULT_TX_CONFIG) + assert len(update_res.transactions) > 0 res = await wallet_1_rpc.get_did_recovery_list(did_wallet_id_0) assert res["num_required"] == 1 assert res["recovery_list"][0] == did_id_0 @@ -1467,8 +1493,7 @@ async def test_did_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): # Update metadata with pytest.raises(ValueError, match="wallet id 1 is of type Wallet but type DIDWallet is required"): await wallet_1_rpc.update_did_metadata(wallet_1_id, {"Twitter": "Https://test"}, DEFAULT_TX_CONFIG) - res = await wallet_1_rpc.update_did_metadata(did_wallet_id_0, {"Twitter": "Https://test"}, DEFAULT_TX_CONFIG) - assert res["success"] + await wallet_1_rpc.update_did_metadata(did_wallet_id_0, {"Twitter": "Https://test"}, DEFAULT_TX_CONFIG) await farm_transaction_block(full_node_api, wallet_1_node) @@ -1480,8 +1505,7 @@ async def test_did_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): # Transfer DID addr = encode_puzzle_hash(await wallet_2.get_new_puzzlehash(), "txch") - res = await wallet_1_rpc.did_transfer_did(did_wallet_id_0, addr, 0, True, DEFAULT_TX_CONFIG) - assert res["success"] + await wallet_1_rpc.did_transfer_did(did_wallet_id_0, addr, 0, True, DEFAULT_TX_CONFIG) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) await farm_transaction_block(full_node_api, wallet_1_node) @@ -1507,9 +1531,7 @@ async def num_wallets() -> int: assert metadata["Twitter"] == "Https://test" last_did_coin = await did_wallet_2.get_coin() - SpendBundle.from_json_dict( - (await wallet_2_rpc.did_message_spend(did_wallet_2.id(), DEFAULT_TX_CONFIG, push=True))["spend_bundle"] - ) + await wallet_2_rpc.did_message_spend(did_wallet_2.id(), DEFAULT_TX_CONFIG, push=True) await wallet_2_node.wallet_state_manager.add_interested_coin_ids([last_did_coin.name()]) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) @@ -1519,13 +1541,7 @@ async def num_wallets() -> int: assert next_did_coin.parent_coin_info == last_did_coin.name() last_did_coin = next_did_coin - SpendBundle.from_json_dict( - ( - await wallet_2_rpc.did_message_spend( - did_wallet_2.id(), DEFAULT_TX_CONFIG.override(reuse_puzhash=True), push=True - ) - )["spend_bundle"], - ) + await wallet_2_rpc.did_message_spend(did_wallet_2.id(), DEFAULT_TX_CONFIG.override(reuse_puzhash=True), push=True) await wallet_2_node.wallet_state_manager.add_interested_coin_ids([last_did_coin.name()]) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) @@ -1550,7 +1566,7 @@ async def test_nft_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): res = await wallet_1_rpc.create_new_nft_wallet(None) nft_wallet_id = res["wallet_id"] - res = await wallet_1_rpc.mint_nft( + mint_res = await wallet_1_rpc.mint_nft( nft_wallet_id, None, None, @@ -1558,9 +1574,8 @@ async def test_nft_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): ["https://www.chia.net/img/branding/chia-logo.svg"], DEFAULT_TX_CONFIG, ) - assert res["success"] - spend_bundle = SpendBundle.from_json_dict(json_dict=res["spend_bundle"]) + spend_bundle = mint_res.spend_bundle await farm_transaction(full_node_api, wallet_1_node, spend_bundle) @@ -1586,8 +1601,7 @@ async def have_nfts(): assert nft_info["nft_coin_id"][2:] == 
(await nft_wallet.get_current_nfts())[0].coin.name().hex() addr = encode_puzzle_hash(await wallet_2.get_new_puzzlehash(), "txch") - res = await wallet_1_rpc.transfer_nft(nft_wallet_id, nft_id, addr, 0, DEFAULT_TX_CONFIG) - assert res["success"] + await wallet_1_rpc.transfer_nft(nft_wallet_id, nft_id, addr, 0, DEFAULT_TX_CONFIG) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) await farm_transaction_block(full_node_api, wallet_1_node) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 0) @@ -1689,7 +1703,7 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn addr = encode_puzzle_hash(ph, "txch") tx_amount = uint64(15600000) await env.full_node.api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - created_tx = await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG) + created_tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction await time_out_assert(20, tx_in_mempool, True, client, created_tx.name) assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 1 @@ -1779,7 +1793,7 @@ async def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment for tx_amount in tx_amounts: funds -= tx_amount # create coins for tests - tx = await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG) + tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction spend_bundle = tx.spend_bundle assert spend_bundle is not None for coin in spend_bundle.additions(): @@ -2335,14 +2349,16 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes wallet_node: WalletNode = env.wallet_1.node wallet_node_2: WalletNode = env.wallet_2.node # Test Clawback resync - tx = await wc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=address, - tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], - ) + tx = ( + await wc.send_transaction( + wallet_id=1, + amount=uint64(500), + address=address, + tx_config=DEFAULT_TX_CONFIG, + fee=uint64(0), + puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], + ) + ).transaction clawback_coin_id = tx.additions[0].name() assert tx.spend_bundle is not None await farm_transaction(full_node_api, wallet_node, tx.spend_bundle) @@ -2487,7 +2503,7 @@ async def test_cat_spend_run_tail(wallet_rpc_environment: WalletRpcTestEnvironme ) tx_amount = uint64(100) - tx = await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG) + tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction transaction_id = tx.name spend_bundle = tx.spend_bundle assert spend_bundle is not None @@ -2527,13 +2543,15 @@ async def test_cat_spend_run_tail(wallet_rpc_environment: WalletRpcTestEnvironme await time_out_assert(20, get_confirmed_balance, tx_amount, client, cat_wallet_id) # Attempt to melt it fully - tx = await client.cat_spend( - cat_wallet_id, - amount=uint64(0), - tx_config=DEFAULT_TX_CONFIG, - inner_address=encode_puzzle_hash(our_ph, "txch"), - cat_discrepancy=(tx_amount * -1, Program.to(None), Program.to(None)), - ) + tx = ( + await client.cat_spend( + cat_wallet_id, + amount=uint64(0), + tx_config=DEFAULT_TX_CONFIG, + inner_address=encode_puzzle_hash(our_ph, "txch"), + cat_discrepancy=(tx_amount * -1, Program.to(None), Program.to(None)), + ) + ).transaction transaction_id = tx.name spend_bundle = tx.spend_bundle assert 
spend_bundle is not None diff --git a/chia/_tests/wallet/test_clvm_streamable.py b/chia/_tests/wallet/test_clvm_streamable.py index 20f9010a5875..19a84bf14be3 100644 --- a/chia/_tests/wallet/test_clvm_streamable.py +++ b/chia/_tests/wallet/test_clvm_streamable.py @@ -6,8 +6,13 @@ import pytest from chia.types.blockchain_format.program import Program +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 from chia.util.streamable import Streamable, streamable +from chia.wallet.signer_protocol import Coin, Spend from chia.wallet.util.clvm_streamable import ( + TranslationLayer, + TranslationLayerMapping, byte_deserialize_clvm_streamable, byte_serialize_clvm_streamable, clvm_streamable, @@ -114,3 +119,91 @@ def test_compound_type_serialization() -> None: @dataclasses.dataclass(frozen=True) class DoesntWork(Streamable): tuples_are_not_supported: Tuple[str] + + +@clvm_streamable +@dataclasses.dataclass(frozen=True) +class FooSpend(Streamable): + coin: Coin + puzzle_and_solution: Program + + @staticmethod + def from_wallet_api(_from: Spend) -> FooSpend: + return FooSpend( + _from.coin, + Program.to((_from.puzzle, _from.solution)), + ) + + @staticmethod + def to_wallet_api(_from: FooSpend) -> Spend: + return Spend( + _from.coin, + _from.puzzle_and_solution.first(), + _from.puzzle_and_solution.rest(), + ) + + +def test_translation_layer() -> None: + FOO_TRANSLATION = TranslationLayer( + [ + TranslationLayerMapping( + Spend, + FooSpend, + FooSpend.from_wallet_api, + FooSpend.to_wallet_api, + ) + ] + ) + + coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + spend = Spend( + coin, + Program.to("puzzle"), + Program.to("solution"), + ) + foo_spend = FooSpend( + coin, + Program.to(("puzzle", "solution")), + ) + + byte_serialize_clvm_streamable(foo_spend) == byte_serialize_clvm_streamable( + spend, translation_layer=FOO_TRANSLATION + ) + program_serialize_clvm_streamable(foo_spend) == program_serialize_clvm_streamable( + spend, translation_layer=FOO_TRANSLATION + ) + json_serialize_with_clvm_streamable(foo_spend) == json_serialize_with_clvm_streamable( + spend, translation_layer=FOO_TRANSLATION + ) + assert spend == byte_deserialize_clvm_streamable( + byte_serialize_clvm_streamable(foo_spend), Spend, translation_layer=FOO_TRANSLATION + ) + assert spend == program_deserialize_clvm_streamable( + program_serialize_clvm_streamable(foo_spend), Spend, translation_layer=FOO_TRANSLATION + ) + assert spend == json_deserialize_with_clvm_streamable( + json_serialize_with_clvm_streamable(foo_spend), Spend, translation_layer=FOO_TRANSLATION + ) + + # Deserialization should only work now if using the translation layer + with pytest.raises(Exception): + byte_deserialize_clvm_streamable(byte_serialize_clvm_streamable(foo_spend), Spend) + with pytest.raises(Exception): + program_deserialize_clvm_streamable(program_serialize_clvm_streamable(foo_spend), Spend) + with pytest.raises(Exception): + json_deserialize_with_clvm_streamable(json_serialize_with_clvm_streamable(foo_spend), Spend) + + # Test that types not registered with translation layer are serialized properly + assert coin == byte_deserialize_clvm_streamable( + byte_serialize_clvm_streamable(coin, translation_layer=FOO_TRANSLATION), Coin, translation_layer=FOO_TRANSLATION + ) + assert coin == program_deserialize_clvm_streamable( + program_serialize_clvm_streamable(coin, translation_layer=FOO_TRANSLATION), + Coin, + translation_layer=FOO_TRANSLATION, + ) + assert coin == json_deserialize_with_clvm_streamable( + 
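`test_translation_layer` drives chia's `TranslationLayer`/`TranslationLayerMapping` with the `FooSpend` fixture. The sketch below re-implements the idea from scratch with illustrative names (`Layer`, `Mapping`) and a dict-based encoding, purely to show the mechanics: types with a registered mapping are converted to their wire form before serialization and converted back on deserialization, while unmapped types pass through unchanged. It is not the real `clvm_streamable` API.

```python
from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Callable, Dict, Type


@dataclass(frozen=True)
class Spend:  # stand-in for the wallet-API type
    puzzle: str
    solution: str


@dataclass(frozen=True)
class FooSpend:  # stand-in for the alternative wire format
    puzzle_and_solution: str


@dataclass(frozen=True)
class Mapping:
    api_type: Type[Any]
    wire_type: Type[Any]
    to_wire: Callable[[Any], Any]
    from_wire: Callable[[Any], Any]


class Layer:
    def __init__(self, mappings: list[Mapping]) -> None:
        self._by_api: Dict[Type[Any], Mapping] = {m.api_type: m for m in mappings}

    def serialize(self, obj: Any) -> Dict[str, Any]:
        mapping = self._by_api.get(type(obj))
        if mapping is not None:
            obj = mapping.to_wire(obj)  # translate before encoding
        return dict(vars(obj), __type__=type(obj).__name__)

    def deserialize(self, blob: Dict[str, Any], api_type: Type[Any]) -> Any:
        mapping = self._by_api.get(api_type)
        fields = {k: v for k, v in blob.items() if k != "__type__"}
        if mapping is None:
            return api_type(**fields)  # unmapped types round-trip unchanged
        return mapping.from_wire(mapping.wire_type(**fields))


layer = Layer(
    [
        Mapping(
            Spend,
            FooSpend,
            to_wire=lambda s: FooSpend(f"{s.puzzle}|{s.solution}"),
            from_wire=lambda f: Spend(*f.puzzle_and_solution.split("|")),
        )
    ]
)

spend = Spend("puzzle", "solution")
encoded = layer.serialize(spend)
assert encoded["__type__"] == "FooSpend"           # wire form is the translated type
assert layer.deserialize(encoded, Spend) == spend  # and it round-trips back
# Types without a registered mapping serialize as themselves, like Coin in the test.
assert layer.deserialize(layer.serialize(FooSpend("x")), FooSpend) == FooSpend("x")
```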
json_serialize_with_clvm_streamable(coin, translation_layer=FOO_TRANSLATION), + Coin, + translation_layer=FOO_TRANSLATION, + ) diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index e2a310b3f61d..cc6b33146b82 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -3,11 +3,35 @@ import dataclasses from typing import List, Optional +import click import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey +from click.testing import CliRunner +from chia._tests.cmds.test_cmd_framework import check_click_parsing +from chia._tests.cmds.wallet.test_consts import STD_TX from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework -from chia.rpc.wallet_request_types import ApplySignatures, GatherSigningInfo, SubmitTransactions +from chia.cmds.cmd_classes import NeedsWalletRPC, WalletClientInfo, chia_command +from chia.cmds.cmds_util import TransactionBundle +from chia.cmds.signer import ( + ApplySignaturesCMD, + ExecuteSigningInstructionsCMD, + GatherSigningInfoCMD, + PushTransactionsCMD, + QrCodeDisplay, + SPIn, + SPOut, + TransactionsIn, + TransactionsOut, +) +from chia.rpc.util import ALL_TRANSLATION_LAYERS +from chia.rpc.wallet_request_types import ( + ApplySignatures, + ExecuteSigningInstructions, + GatherSigningInfo, + GatherSigningInfoResponse, + SubmitTransactions, +) from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.coin import Coin as ConsensusCoin from chia.types.blockchain_format.program import Program @@ -16,6 +40,7 @@ from chia.types.spend_bundle import SpendBundle from chia.util.hash import std_hash from chia.util.ints import uint64 +from chia.util.streamable import Streamable from chia.wallet.conditions import AggSigMe from chia.wallet.derivation_record import DerivationRecord from chia.wallet.derive_keys import _derive_path_unhardened @@ -24,6 +49,7 @@ calculate_synthetic_offset, ) from chia.wallet.signer_protocol import ( + Coin, KeyHints, PathHint, SignedTransaction, @@ -35,8 +61,24 @@ TransactionInfo, UnsignedTransaction, ) -from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable, json_serialize_with_clvm_streamable -from chia.wallet.util.tx_config import DEFAULT_COIN_SELECTION_CONFIG +from chia.wallet.util.blind_signer_tl import ( + BLIND_SIGNER_TRANSLATION, + BSTLPathHint, + BSTLSigningInstructions, + BSTLSigningResponse, + BSTLSigningTarget, + BSTLSumHint, + BSTLUnsignedTransaction, +) +from chia.wallet.util.clvm_streamable import ( + TranslationLayer, + TranslationLayerMapping, + byte_serialize_clvm_streamable, + clvm_streamable, + json_deserialize_with_clvm_streamable, + json_serialize_with_clvm_streamable, +) +from chia.wallet.util.tx_config import DEFAULT_COIN_SELECTION_CONFIG, DEFAULT_TX_CONFIG from chia.wallet.wallet import Wallet from chia.wallet.wallet_state_manager import WalletStateManager @@ -135,9 +177,9 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram assert utx.signing_instructions.targets[0].fingerprint == synthetic_pubkey.get_fingerprint().to_bytes(4, "big") assert utx.signing_instructions.targets[0].message == message - signing_responses: List[SigningResponse] = await wallet_state_manager.execute_signing_instructions( - utx.signing_instructions - ) + signing_responses: List[SigningResponse] = ( + await wallet_rpc.execute_signing_instructions(ExecuteSigningInstructions(utx.signing_instructions)) + 
).signing_responses assert len(signing_responses) == 1 assert signing_responses[0].hook == utx.signing_instructions.targets[0].hook assert AugSchemeMPL.verify(synthetic_pubkey, message, G2Element.from_bytes(signing_responses[0].signature)) @@ -244,6 +286,16 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram ] ) + # And test that we can get compressed versions if we want + request = GatherSigningInfo( + [Spend.from_coin_spend(coin_spend), Spend.from_coin_spend(not_our_coin_spend)] + ).to_json_dict() + response_dict = await wallet_rpc.fetch("gather_signing_info", {"translation": "chip-0029", **request}) + response: GatherSigningInfoResponse = json_deserialize_with_clvm_streamable( + response_dict, GatherSigningInfoResponse, translation_layer=BLIND_SIGNER_TRANSLATION + ) + assert response.signing_instructions == not_our_utx.signing_instructions + @pytest.mark.parametrize( "wallet_environments", @@ -447,3 +499,445 @@ async def test_p2blsdohp_execute_signing_instructions(wallet_environments: Walle partial_allowed=True, ) assert signing_responses == [SigningResponse(bytes(AugSchemeMPL.sign(other_sk, test_name, sum_pk)), test_name)] + + +def test_blind_signer_translation_layer() -> None: + sum_hints: List[SumHint] = [ + SumHint([b"a", b"b", b"c"], b"offset", b"final"), + SumHint([b"c", b"b", b"a"], b"offset2", b"final"), + ] + path_hints: List[PathHint] = [ + PathHint(b"root1", [uint64(1), uint64(2), uint64(3)]), + PathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), + ] + signing_targets: List[SigningTarget] = [ + SigningTarget(b"pubkey", b"message", bytes32([0] * 32)), + SigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), + ] + + instructions: SigningInstructions = SigningInstructions( + KeyHints(sum_hints, path_hints), + signing_targets, + ) + transaction: UnsignedTransaction = UnsignedTransaction( + TransactionInfo([]), + instructions, + ) + signing_response: SigningResponse = SigningResponse( + b"signature", + bytes32([1] * 32), + ) + + bstl_sum_hints: List[BSTLSumHint] = [ + BSTLSumHint([b"a", b"b", b"c"], b"offset", b"final"), + BSTLSumHint([b"c", b"b", b"a"], b"offset2", b"final"), + ] + bstl_path_hints: List[BSTLPathHint] = [ + BSTLPathHint(b"root1", [uint64(1), uint64(2), uint64(3)]), + BSTLPathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), + ] + bstl_signing_targets: List[BSTLSigningTarget] = [ + BSTLSigningTarget(b"pubkey", b"message", bytes32([0] * 32)), + BSTLSigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), + ] + + bstl_instructions: BSTLSigningInstructions = BSTLSigningInstructions( + bstl_sum_hints, + bstl_path_hints, + bstl_signing_targets, + ) + bstl_transaction: BSTLUnsignedTransaction = BSTLUnsignedTransaction( + bstl_sum_hints, + bstl_path_hints, + bstl_signing_targets, + ) + bstl_signing_response: BSTLSigningResponse = BSTLSigningResponse( + b"signature", + bytes32([1] * 32), + ) + bstl_instructions_json = json_serialize_with_clvm_streamable(bstl_instructions) + bstl_transaction_json = json_serialize_with_clvm_streamable(bstl_transaction) + bstl_signing_response_json = json_serialize_with_clvm_streamable(bstl_signing_response) + assert bstl_instructions_json == json_serialize_with_clvm_streamable( + instructions, translation_layer=BLIND_SIGNER_TRANSLATION + ) + assert bstl_transaction_json == json_serialize_with_clvm_streamable( + transaction, translation_layer=BLIND_SIGNER_TRANSLATION + ) + assert bstl_signing_response_json == json_serialize_with_clvm_streamable( + signing_response, 
translation_layer=BLIND_SIGNER_TRANSLATION + ) + + assert ( + json_deserialize_with_clvm_streamable( + bstl_instructions_json, SigningInstructions, translation_layer=BLIND_SIGNER_TRANSLATION + ) + == instructions + ) + assert ( + json_deserialize_with_clvm_streamable( + bstl_transaction_json, UnsignedTransaction, translation_layer=BLIND_SIGNER_TRANSLATION + ) + == transaction + ) + assert ( + json_deserialize_with_clvm_streamable( + bstl_signing_response_json, SigningResponse, translation_layer=BLIND_SIGNER_TRANSLATION + ) + == signing_response + ) + + +@pytest.mark.parametrize( + "wallet_environments", + [ + { + "num_environments": 1, + "blocks_needed": [1], + "trusted": True, + "reuse_puzhash": True, + } + ], + indirect=True, +) +@pytest.mark.anyio +async def test_signer_commands(wallet_environments: WalletTestFramework) -> None: + wallet: Wallet = wallet_environments.environments[0].xch_wallet + wallet_state_manager: WalletStateManager = wallet_environments.environments[0].wallet_state_manager + wallet_rpc: WalletRpcClient = wallet_environments.environments[0].rpc_client + client_info: WalletClientInfo = WalletClientInfo( + wallet_rpc, + wallet_state_manager.root_pubkey.get_fingerprint(), + wallet_state_manager.config, + ) + + AMOUNT = uint64(1) + [tx] = await wallet.generate_signed_transaction(AMOUNT, bytes32([0] * 32), DEFAULT_TX_CONFIG) + + runner = CliRunner() + with runner.isolated_filesystem(): + with open("./temp-tb", "wb") as file: + file.write(bytes(TransactionBundle([tx]))) + + await GatherSigningInfoCMD( + rpc_info=NeedsWalletRPC(client_info=client_info), + sp_out=SPOut( + translation="CHIP-0028", + output_format="file", + output_file=["./temp-si"], + ), + txs_in=TransactionsIn(transaction_file_in="./temp-tb"), + ).run() + + await ExecuteSigningInstructionsCMD( + rpc_info=NeedsWalletRPC(client_info=client_info), + sp_in=SPIn( + translation="CHIP-0028", + signer_protocol_input=["./temp-si"], + ), + sp_out=SPOut( + translation="CHIP-0028", + output_format="file", + output_file=["./temp-sr"], + ), + ).run() + + await ApplySignaturesCMD( + rpc_info=NeedsWalletRPC(client_info=client_info), + txs_in=TransactionsIn(transaction_file_in="./temp-tb"), + sp_in=SPIn( + translation="CHIP-0028", + signer_protocol_input=["./temp-sr"], + ), + txs_out=TransactionsOut(transaction_file_out="./temp-stb"), + ).run() + + await PushTransactionsCMD( + rpc_info=NeedsWalletRPC(client_info=client_info), + txs_in=TransactionsIn(transaction_file_in="./temp-stb"), + ).run() + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + 1: { + "unconfirmed_wallet_balance": -1 * AMOUNT, + "<=#spendable_balance": -1 * AMOUNT, + "<=#max_send_amount": -1 * AMOUNT, + "pending_change": sum(c.amount for c in tx.removals) - AMOUNT, + "pending_coin_removal_count": 1, + } + }, + post_block_balance_updates={ + 1: { + "confirmed_wallet_balance": -1 * AMOUNT, + "pending_change": -1 * (sum(c.amount for c in tx.removals) - AMOUNT), + "pending_coin_removal_count": -1, + "set_remainder": True, + }, + }, + ), + ] + ) + + +def test_signer_command_default_parsing() -> None: + check_click_parsing( + GatherSigningInfoCMD( + rpc_info=NeedsWalletRPC(client_info=None, wallet_rpc_port=None, fingerprint=None), + sp_out=SPOut( + translation="none", + output_format="hex", + output_file=tuple(), + ), + txs_in=TransactionsIn(transaction_file_in="in"), + ), + "-i", + "in", + ) + + check_click_parsing( + ExecuteSigningInstructionsCMD( + rpc_info=NeedsWalletRPC(client_info=None, 
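`test_signer_commands` above chains the four new signer CLI commands through files in an isolated filesystem: a transaction bundle goes in, signing info, signing responses, and finally a signed bundle come out, before the result is pushed. The stripped-down sketch below mirrors that handoff with plain functions and made-up JSON payloads; the real commands exchange CLVM-serialized signer-protocol objects and talk to the wallet RPC.

```python
import json
import tempfile
from pathlib import Path


def gather_signing_info(tx_file: Path, out_file: Path) -> None:
    txs = json.loads(tx_file.read_text())
    out_file.write_text(json.dumps({"targets": [t["id"] for t in txs]}))


def execute_signing_instructions(info_file: Path, out_file: Path) -> None:
    info = json.loads(info_file.read_text())
    out_file.write_text(json.dumps([{"target": t, "signature": f"sig({t})"} for t in info["targets"]]))


def apply_signatures(tx_file: Path, responses_file: Path, out_file: Path) -> None:
    txs = json.loads(tx_file.read_text())
    sigs = {r["target"]: r["signature"] for r in json.loads(responses_file.read_text())}
    out_file.write_text(json.dumps([dict(t, signature=sigs[t["id"]]) for t in txs]))


with tempfile.TemporaryDirectory() as tmp:
    d = Path(tmp)
    (d / "temp-tb").write_text(json.dumps([{"id": "tx1"}]))      # transaction bundle in
    gather_signing_info(d / "temp-tb", d / "temp-si")            # -> signing info
    execute_signing_instructions(d / "temp-si", d / "temp-sr")   # -> signing responses
    apply_signatures(d / "temp-tb", d / "temp-sr", d / "temp-stb")  # -> signed bundle
    assert json.loads((d / "temp-stb").read_text())[0]["signature"] == "sig(tx1)"
```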
wallet_rpc_port=None, fingerprint=None), + sp_in=SPIn( + translation="none", + signer_protocol_input=("sp-in",), + ), + sp_out=SPOut( + translation="none", + output_format="hex", + output_file=tuple(), + ), + ), + "-p", + "sp-in", + ) + + check_click_parsing( + ApplySignaturesCMD( + rpc_info=NeedsWalletRPC(client_info=None, wallet_rpc_port=None, fingerprint=None), + txs_in=TransactionsIn(transaction_file_in="in"), + sp_in=SPIn( + translation="none", + signer_protocol_input=("sp-in",), + ), + txs_out=TransactionsOut(transaction_file_out="out"), + ), + "-i", + "in", + "-o", + "out", + "-p", + "sp-in", + ) + + check_click_parsing( + PushTransactionsCMD( + rpc_info=NeedsWalletRPC(client_info=None, wallet_rpc_port=None, fingerprint=None), + txs_in=TransactionsIn(transaction_file_in="in"), + ), + "-i", + "in", + ) + + +def test_transactions_in() -> None: + @click.group() + def cmd() -> None: + pass + + @chia_command(cmd, "temp_cmd", "blah") + class TempCMD(TransactionsIn): + def run(self) -> None: + assert self.transaction_bundle == TransactionBundle([STD_TX]) + + runner = CliRunner() + with runner.isolated_filesystem(): + with open("some file", "wb") as file: + file.write(bytes(TransactionBundle([STD_TX]))) + + result = runner.invoke(cmd, ["temp_cmd", "--transaction-file-in", "some file"], catch_exceptions=False) + assert result.output == "" + + +def test_transactions_out() -> None: + @click.group() + def cmd() -> None: + pass + + @chia_command(cmd, "temp_cmd", "blah") + class TempCMD(TransactionsOut): + def run(self) -> None: + self.handle_transaction_output([STD_TX]) + + runner = CliRunner() + with runner.isolated_filesystem(): + result = runner.invoke(cmd, ["temp_cmd", "--transaction-file-out", "some file"], catch_exceptions=False) + assert result.output == "" + + with open("some file", "rb") as file: + file.read() == bytes(TransactionBundle([STD_TX])) + + +@clvm_streamable +@dataclasses.dataclass(frozen=True) +class FooCoin(Streamable): + amount: uint64 + + @staticmethod + def from_wallet_api(_from: Coin) -> FooCoin: + return FooCoin(_from.amount) + + @staticmethod + def to_wallet_api(_from: FooCoin) -> Coin: + return Coin( + bytes32([0] * 32), + bytes32([0] * 32), + _from.amount, + ) + + +FOO_COIN_TRANSLATION = TranslationLayer( + [ + TranslationLayerMapping( + Coin, + FooCoin, + FooCoin.from_wallet_api, + FooCoin.to_wallet_api, + ) + ] +) + + +def test_signer_protocol_in(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setitem(ALL_TRANSLATION_LAYERS, "CHIP-0028", FOO_COIN_TRANSLATION) + + @click.group() + def cmd() -> None: + pass + + coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(13)) + + @chia_command(cmd, "temp_cmd", "blah") + class TempCMD(SPIn): + def run(self) -> None: + assert self.read_sp_input(Coin) == [coin, coin] + + runner = CliRunner() + with runner.isolated_filesystem(): + with open("some file", "wb") as file: + file.write(byte_serialize_clvm_streamable(coin)) + + with open("some file2", "wb") as file: + file.write(byte_serialize_clvm_streamable(coin)) + + result = runner.invoke( + cmd, + ["temp_cmd", "--signer-protocol-input", "some file", "--signer-protocol-input", "some file2"], + catch_exceptions=False, + ) + assert result.output == "" + + with runner.isolated_filesystem(): + with open("some file", "wb") as file: + file.write(byte_serialize_clvm_streamable(coin, translation_layer=FOO_COIN_TRANSLATION)) + + with open("some file2", "wb") as file: + file.write(byte_serialize_clvm_streamable(coin, translation_layer=FOO_COIN_TRANSLATION)) + + result = 
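These command tests all use the same click testing recipe: `CliRunner.isolated_filesystem()` provides a throwaway working directory, `invoke(..., catch_exceptions=False)` runs the command in-process so assertion failures inside it surface as test failures, and `result.output` / `result.exit_code` carry the outcome. A minimal standalone example of the recipe; the command and option names here are invented:

```python
import click
from click.testing import CliRunner


@click.group()
def cmd() -> None:
    pass


@cmd.command("temp_cmd")
@click.option("--transaction-file-in", type=str, required=True)
def temp_cmd(transaction_file_in: str) -> None:
    # The command body can assert directly; with catch_exceptions=False a failed
    # assertion propagates out of invoke() and fails the test.
    with open(transaction_file_in, "rb") as file:
        assert file.read() == b"payload"


def test_reads_file() -> None:
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open("some file", "wb") as file:
            file.write(b"payload")

        result = runner.invoke(cmd, ["temp_cmd", "--transaction-file-in", "some file"], catch_exceptions=False)
        assert result.exit_code == 0
        assert result.output == ""


test_reads_file()
```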
runner.invoke( + cmd, ["temp_cmd", "--signer-protocol-input", "some file", "--signer-protocol-input", "some file2"] + ) + assert result.exception is not None + result = runner.invoke( + cmd, + [ + "temp_cmd", + "--signer-protocol-input", + "some file", + "--signer-protocol-input", + "some file2", + "--translation", + "CHIP-0028", + ], + catch_exceptions=False, + ) + assert result.output == "" + + +def test_signer_protocol_out(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setitem(ALL_TRANSLATION_LAYERS, "CHIP-0028", FOO_COIN_TRANSLATION) + + @click.group() + def cmd() -> None: + pass + + coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin_bytes = byte_serialize_clvm_streamable(coin) + + @chia_command(cmd, "temp_cmd", "blah") + class TempCMD(SPOut): + def run(self) -> None: + self.handle_clvm_output([coin, coin]) + + runner = CliRunner() + with runner.isolated_filesystem(): + result = runner.invoke(cmd, ["temp_cmd", "--output-format", "hex"], catch_exceptions=False) + assert result.output.strip() == coin_bytes.hex() + "\n" + coin_bytes.hex() + + result = runner.invoke(cmd, ["temp_cmd", "--output-format", "file"], catch_exceptions=False) + assert result.output == "--output-format=file specifed without any --output-file\n" + + result = runner.invoke( + cmd, ["temp_cmd", "--output-format", "file", "--output-file", "some file"], catch_exceptions=False + ) + assert "Incorrect number of file outputs specified" in result.output + + result = runner.invoke( + cmd, + ["temp_cmd", "--output-format", "file", "--output-file", "some file", "--output-file", "some file2"], + catch_exceptions=False, + ) + assert result.output == "" + + with open("some file", "rb") as file: + file.read() == coin_bytes + + with open("some file2", "rb") as file: + file.read() == coin_bytes + + result = runner.invoke(cmd, ["temp_cmd", "--output-format", "qr"], catch_exceptions=False) + assert result.output != "" # separate test for QrCodeDisplay + + result = runner.invoke( + cmd, ["temp_cmd", "--output-format", "hex", "--translation", "CHIP-0028"], catch_exceptions=False + ) + assert result.output.strip() != coin_bytes.hex() + coin_hex = byte_serialize_clvm_streamable(coin, translation_layer=ALL_TRANSLATION_LAYERS["CHIP-0028"]).hex() + assert result.output.strip() == coin_hex + "\n" + coin_hex + + +def test_qr_code_display() -> None: + @click.group() + def cmd() -> None: + pass + + bytes_to_encode = b"foo bar qat qux bam bat" + + @chia_command(cmd, "temp_cmd", "blah") + class TempCMD(QrCodeDisplay): + def run(self) -> None: + self.display_qr_codes([bytes_to_encode, bytes_to_encode]) + + runner = CliRunner() + result = runner.invoke( + cmd, + ["temp_cmd"], + input="\n", + catch_exceptions=False, + ) + + # Would be good to check eventually that the QR codes are valid but segno doesn't seem to provide that ATM + assert result.output.count("Displaying QR Codes (1/2)") == 1 + assert result.output.count("Displaying QR Codes (2/2)") == 1 diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 4bc73e812396..b308da9f2d7d 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -52,16 +52,18 @@ async def mint_cr_cat( CAT_AMOUNT_0 = uint64(100) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=20) - tx = await client_0.create_signed_transaction( - [ - { - "puzzle_hash": cat_puzzle.get_tree_hash(), - "amount": CAT_AMOUNT_0, - } - ], - DEFAULT_TX_CONFIG, - wallet_id=1, - ) 
+ tx = ( + await client_0.create_signed_transactions( + [ + { + "puzzle_hash": cat_puzzle.get_tree_hash(), + "amount": CAT_AMOUNT_0, + } + ], + DEFAULT_TX_CONFIG, + wallet_id=1, + ) + ).signed_tx spend_bundle = tx.spend_bundle assert spend_bundle is not None @@ -156,12 +158,14 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: ) # Mint a VC - vc_record, _ = await client_0.vc_mint( - did_id, - wallet_environments.tx_config, - target_address=await wallet_0.get_new_puzzlehash(), - fee=uint64(1_750_000_000_000), - ) + vc_record = ( + await client_0.vc_mint( + did_id, + wallet_environments.tx_config, + target_address=await wallet_0.get_new_puzzlehash(), + fee=uint64(1_750_000_000_000), + ) + ).vc_record await wallet_environments.process_pending_states( [ @@ -344,14 +348,16 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: assert await wallet_node_0.wallet_state_manager.get_wallet_for_asset_id(cr_cat_wallet_0.get_asset_id()) is not None wallet_1_ph = await wallet_1.get_new_puzzlehash() wallet_1_addr = encode_puzzle_hash(wallet_1_ph, "txch") - tx = await client_0.cat_spend( - cr_cat_wallet_0.id(), - wallet_environments.tx_config, - uint64(90), - wallet_1_addr, - uint64(2000000000), - memos=["hey"], - ) + tx = ( + await client_0.cat_spend( + cr_cat_wallet_0.id(), + wallet_environments.tx_config, + uint64(90), + wallet_1_addr, + uint64(2000000000), + memos=["hey"], + ) + ).transaction [tx] = await wallet_node_0.wallet_state_manager.add_pending_transactions([tx]) await wallet_environments.process_pending_states( [ @@ -515,14 +521,16 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: ) # Test melting a CRCAT - tx = await client_1.cat_spend( - env_1.dealias_wallet_id("crcat"), - wallet_environments.tx_config, - uint64(20), - wallet_1_addr, - uint64(0), - cat_discrepancy=(-50, Program.to(None), Program.to(None)), - ) + tx = ( + await client_1.cat_spend( + env_1.dealias_wallet_id("crcat"), + wallet_environments.tx_config, + uint64(20), + wallet_1_addr, + uint64(0), + cat_discrepancy=(-50, Program.to(None), Program.to(None)), + ) + ).transaction [tx] = await wallet_node_1.wallet_state_manager.add_pending_transactions([tx]) await wallet_environments.process_pending_states( [ @@ -646,9 +654,11 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: ) did_id: bytes32 = bytes32.from_hexstr(did_wallet.get_my_DID()) - vc_record, _ = await client_0.vc_mint( - did_id, wallet_environments.tx_config, target_address=await wallet_0.get_new_puzzlehash(), fee=uint64(200) - ) + vc_record = ( + await client_0.vc_mint( + did_id, wallet_environments.tx_config, target_address=await wallet_0.get_new_puzzlehash(), fee=uint64(200) + ) + ).vc_record await wallet_environments.process_pending_states( [ WalletStateTransition( diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py index 71b6fce10cbb..cf67f4f843a1 100644 --- a/chia/cmds/chia.py +++ b/chia/cmds/chia.py @@ -134,6 +134,8 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None: def main() -> None: + import chia.cmds.signer # noqa + cli() # pylint: disable=no-value-for-parameter diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index 3312d020e32d..2c82fb9a3dcc 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -27,6 +27,7 @@ from chia.util.errors import CliRpcConnectionError, InvalidPathError from chia.util.ints import uint16 from chia.util.keychain import KeyData +from chia.util.streamable import Streamable, 
streamable from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.tx_config import CoinSelectionConfig, CoinSelectionConfigLoader, TXConfig, TXConfigLoader @@ -59,7 +60,7 @@ def transaction_submitted_msg(tx: TransactionRecord) -> str: def transaction_status_msg(fingerprint: int, tx_id: bytes32) -> str: - return f"Run 'chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}' to get status" + return f"Run 'chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id.hex()}' to get status" async def validate_client_connection( @@ -328,6 +329,33 @@ def timelock_args(func: Callable[..., None]) -> Callable[..., None]: ) +@streamable +@dataclasses.dataclass(frozen=True) +class TransactionBundle(Streamable): + txs: List[TransactionRecord] + + +def tx_out_cmd(func: Callable[..., List[TransactionRecord]]) -> Callable[..., None]: + def original_cmd(transaction_file: Optional[str] = None, **kwargs: Any) -> None: + txs: List[TransactionRecord] = func(**kwargs) + if transaction_file is not None: + print(f"Writing transactions to file {transaction_file}:") + with open(Path(transaction_file), "wb") as file: + file.write(bytes(TransactionBundle(txs))) + + return click.option( + "--push/--no-push", help="Push the transaction to the network", type=bool, is_flag=True, default=True + )( + click.option( + "--transaction-file", + help="A file to write relevant transactions to", + type=str, + required=False, + default=None, + )(original_cmd) + ) + + @dataclasses.dataclass(frozen=True) class CMDCoinSelectionConfigLoader: min_coin_amount: CliAmount = cli_amount_none diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index bb415ae4b731..67d7eed9ed16 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -122,7 +122,8 @@ async def async_combine( target_coin_amount: CliAmount, target_coin_ids: Sequence[bytes32], largest_first: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): if number_of_coins > 500: raise ValueError(f"{number_of_coins} coins is greater then the maximum limit of 500 coins.") @@ -131,10 +132,10 @@ async def async_combine( mojo_per_unit = get_mojo_per_unit(wallet_type) except LookupError: print(f"Wallet id: {wallet_id} not found.") - return + return [] if not await wallet_client.get_synced(): print("Wallet not synced. 
Please wait.") - return + return [] is_xch: bool = wallet_type == WalletType.STANDARD_WALLET # this lets us know if we are directly combining Chia tx_config = CMDTXConfigLoader( max_coin_amount=max_coin_amount, @@ -160,10 +161,10 @@ async def async_combine( conf_coins = [cr for cr in conf_coins if cr.name in target_coin_ids] if len(conf_coins) == 0: print("No coins to combine.") - return + return [] if len(conf_coins) == 1: print("Only one coin found, you need at least two coins to combine.") - return + return [] if largest_first: conf_coins.sort(key=lambda r: r.coin.amount, reverse=True) else: @@ -176,15 +177,18 @@ async def async_combine( total_amount: uint128 = uint128(sum(coin.amount for coin in removals)) if is_xch and total_amount - fee <= 0: print("Total amount is less than 0 after fee, exiting.") - return + return [] target_ph: bytes32 = decode_puzzle_hash(await wallet_client.get_next_address(wallet_id, False)) additions = [{"amount": (total_amount - fee) if is_xch else total_amount, "puzzle_hash": target_ph}] - transaction: TransactionRecord = await wallet_client.send_transaction_multi( - wallet_id, additions, tx_config, removals, fee - ) + transaction: TransactionRecord = ( + await wallet_client.send_transaction_multi(wallet_id, additions, tx_config, removals, fee, push=push) + ).transaction tx_id = transaction.name.hex() - print(f"Transaction sent: {tx_id}") - print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") + if push: + print(f"Transaction sent: {tx_id}") + print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") + + return [transaction] async def async_split( @@ -197,21 +201,22 @@ async def async_split( amount_per_coin: CliAmount, target_coin_id_str: str, # TODO: [add TXConfig args] -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): target_coin_id: bytes32 = bytes32.from_hexstr(target_coin_id_str) if number_of_coins > 500: print(f"{number_of_coins} coins is greater then the maximum limit of 500 coins.") - return + return [] try: wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(wallet_type) except LookupError: print(f"Wallet id: {wallet_id} not found.") - return + return [] if not await wallet_client.get_synced(): print("Wallet not synced. Please wait.") - return + return [] is_xch: bool = wallet_type == WalletType.STANDARD_WALLET # this lets us know if we are directly spitting Chia final_amount_per_coin = amount_per_coin.convert_amount(mojo_per_unit) total_amount = final_amount_per_coin * number_of_coins @@ -225,7 +230,7 @@ async def async_split( f"is less than the total amount of the split: {total_amount / mojo_per_unit}, exiting." ) print("Try using a smaller fee or amount.") - return + return [] additions: List[Dict[str, Union[uint64, bytes32]]] = [] for i in range(number_of_coins): # for readability. 
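Stepping back to the `transaction_status_msg` tweak earlier in this patch: calling `tx_id.hex()` makes the hex rendering explicit instead of relying on how the bytes type happens to stringify (chia's `bytes32` may already render as hex, so treat this as an illustration of why being explicit is the safer habit). With a plain `bytes` object the difference is stark:

```python
tx_id = bytes.fromhex("ab" * 32)  # stand-in for a 32-byte transaction id

broken = f"-tx 0x{tx_id}"        # "-tx 0xb'\\xab\\xab...'" -- not a pasteable id
fixed = f"-tx 0x{tx_id.hex()}"   # "-tx 0xabab...ab"

assert "b'" in broken
assert fixed == "-tx 0x" + "ab" * 32
```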
# we always use new addresses @@ -236,12 +241,15 @@ async def async_split( # TODO: [add TXConfig args] ).to_tx_config(mojo_per_unit, config, fingerprint) - transaction: TransactionRecord = await wallet_client.send_transaction_multi( - wallet_id, additions, tx_config, [removal_coin_record.coin], fee - ) + transaction: TransactionRecord = ( + await wallet_client.send_transaction_multi( + wallet_id, additions, tx_config, [removal_coin_record.coin], fee, push=push + ) + ).transaction tx_id = transaction.name.hex() - print(f"Transaction sent: {tx_id}") - print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") + if push: + print(f"Transaction sent: {tx_id}") + print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") dust_threshold = config.get("xch_spam_amount", 1000000) # min amount per coin in mojo spam_filter_after_n_txs = config.get("spam_filter_after_n_txs", 200) # how many txs to wait before filtering if final_amount_per_coin < dust_threshold and wallet_type == WalletType.STANDARD_WALLET: @@ -251,3 +259,4 @@ async def async_split( f"{'will' if number_of_coins > spam_filter_after_n_txs else 'may'} not show up in your wallet unless " f"you decrease the dust limit to below {final_amount_per_coin} mojos or disable it by setting it to 0." ) + return [transaction] diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index a91f60bf35d1..bfda1d17f6c8 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -1,14 +1,16 @@ from __future__ import annotations import asyncio -from typing import Optional, Sequence +from typing import List, Optional, Sequence import click from chia.cmds import options +from chia.cmds.cmds_util import tx_out_cmd from chia.cmds.param_types import AmountParamType, Bytes32ParamType, CliAmount, cli_amount_none from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint64 +from chia.wallet.transaction_record import TransactionRecord @click.group("coins", help="Manage your wallets coins") @@ -148,6 +150,7 @@ def list_cmd( default=False, help="Sort coins from largest to smallest or smallest to largest.", ) +@tx_out_cmd def combine_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -160,10 +163,11 @@ def combine_cmd( fee: uint64, input_coins: Sequence[bytes32], largest_first: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .coin_funcs import async_combine - asyncio.run( + return asyncio.run( async_combine( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, @@ -176,6 +180,7 @@ def combine_cmd( target_coin_amount=target_amount, target_coin_ids=input_coins, largest_first=largest_first, + push=push, ) ) @@ -206,6 +211,7 @@ def combine_cmd( required=True, ) @click.option("-t", "--target-coin-id", type=str, required=True, help="The coin id of the coin we are splitting.") +@tx_out_cmd def split_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -214,10 +220,11 @@ def split_cmd( fee: uint64, amount_per_coin: CliAmount, target_coin_id: str, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .coin_funcs import async_split - asyncio.run( + return asyncio.run( async_split( wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, @@ -226,5 +233,6 @@ def split_cmd( number_of_coins=number_of_coins, amount_per_coin=amount_per_coin, target_coin_id_str=target_coin_id, + push=push, ) ) diff --git a/chia/cmds/dao.py b/chia/cmds/dao.py index 4e5483c7e1a4..1fdc76c3e686 100644 --- a/chia/cmds/dao.py +++ b/chia/cmds/dao.py @@ -1,16 
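Both the `coins` and `dao` command groups in this patch are converted to the new `@tx_out_cmd` decorator: each command body now returns its `List[TransactionRecord]`, and the decorator adds the shared `--push/--no-push` and `--transaction-file` options and takes care of persisting the returned transactions. A compact, runnable model of that decorator, which JSON-encodes plain dicts instead of serializing a real `TransactionBundle`:

```python
import json
from pathlib import Path
from typing import Any, Callable, List, Optional

import click


def tx_out_cmd(func: Callable[..., List[Any]]) -> Callable[..., None]:
    def original_cmd(transaction_file: Optional[str] = None, **kwargs: Any) -> None:
        txs = func(**kwargs)  # the command now *returns* its transactions
        if transaction_file is not None:
            print(f"Writing transactions to file {transaction_file}:")
            Path(transaction_file).write_text(json.dumps(txs))

    # Every decorated command grows the same two options, so CLI behaviour stays
    # consistent across the coins and dao subcommands.
    return click.option("--push/--no-push", default=True, help="Push the transaction to the network")(
        click.option("--transaction-file", type=str, default=None, help="A file to write relevant transactions to")(
            original_cmd
        )
    )


@click.command("combine")
@tx_out_cmd
def combine_cmd(push: bool) -> List[Any]:
    return [{"name": "tx1", "pushed": push}]


if __name__ == "__main__":
    combine_cmd()  # e.g. `python sketch.py --no-push --transaction-file out.json`
```

Returning the transactions instead of printing inside each command is what lets the same code path serve both interactive use (`push=True`) and offline signing flows that only want the serialized bundle.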
+1,17 @@ from __future__ import annotations import asyncio -from typing import Optional, Sequence +from typing import List, Optional, Sequence import click from chia.cmds import options -from chia.cmds.cmds_util import CMDTXConfigLoader, tx_config_args +from chia.cmds.cmds_util import CMDTXConfigLoader, tx_config_args, tx_out_cmd from chia.cmds.param_types import AmountParamType, Bytes32ParamType, CliAmount, TransactionFeeParamType, Uint64ParamType from chia.cmds.units import units from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint64 +from chia.wallet.transaction_record import TransactionRecord @click.group("dao", short_help="Create, manage or show state of DAOs", no_args_is_help=True) @@ -144,6 +145,7 @@ def dao_add_cmd( show_default=True, ) @tx_config_args +@tx_out_cmd def dao_create_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -164,7 +166,8 @@ def dao_create_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import create_dao_wallet if self_destruct == proposal_timelock: @@ -172,7 +175,7 @@ def dao_create_cmd( print("Creating new DAO") - asyncio.run( + return asyncio.run( create_dao_wallet( wallet_rpc_port, fingerprint, @@ -195,6 +198,7 @@ def dao_create_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -249,6 +253,7 @@ def dao_get_id_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_add_funds_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -261,10 +266,11 @@ def dao_add_funds_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import add_funds_to_treasury - asyncio.run( + return asyncio.run( add_funds_to_treasury( wallet_rpc_port, fingerprint, @@ -279,6 +285,7 @@ def dao_add_funds_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -421,6 +428,7 @@ def dao_show_proposal_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_vote_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -434,12 +442,13 @@ def dao_vote_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import vote_on_proposal is_yes_vote = False if vote_no else True - asyncio.run( + return asyncio.run( vote_on_proposal( wallet_rpc_port, fingerprint, @@ -455,6 +464,7 @@ def dao_vote_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -489,6 +499,7 @@ def dao_vote_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_close_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -501,10 +512,11 @@ def dao_close_proposal_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import close_proposal - asyncio.run( + return asyncio.run( close_proposal( wallet_rpc_port, fingerprint, @@ -519,6 +531,7 @@ def dao_close_proposal_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -546,6 +559,7 @@ def dao_close_proposal_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_lockup_coins_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -557,10 
+571,11 @@ def dao_lockup_coins_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import lockup_coins - asyncio.run( + return asyncio.run( lockup_coins( wallet_rpc_port, fingerprint, @@ -574,6 +589,7 @@ def dao_lockup_coins_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -590,6 +606,7 @@ def dao_lockup_coins_cmd( @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_release_coins_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -600,10 +617,11 @@ def dao_release_coins_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool = True, +) -> List[TransactionRecord]: from .dao_funcs import release_coins - asyncio.run( + return asyncio.run( release_coins( wallet_rpc_port, fingerprint, @@ -616,6 +634,7 @@ def dao_release_coins_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -632,6 +651,7 @@ def dao_release_coins_cmd( @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_exit_lockup_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -642,10 +662,11 @@ def dao_exit_lockup_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import exit_lockup - asyncio.run( + return asyncio.run( exit_lockup( wallet_rpc_port, fingerprint, @@ -658,6 +679,7 @@ def dao_exit_lockup_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -723,6 +745,7 @@ def dao_proposal(ctx: click.Context) -> None: ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_create_spend_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -738,10 +761,11 @@ def dao_create_spend_proposal_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import create_spend_proposal - asyncio.run( + return asyncio.run( create_spend_proposal( wallet_rpc_port, fingerprint, @@ -759,6 +783,7 @@ def dao_create_spend_proposal_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -825,6 +850,7 @@ def dao_create_spend_proposal_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_create_update_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -842,10 +868,11 @@ def dao_create_update_proposal_cmd( coins_to_exclude: Sequence[bytes32], amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import create_update_proposal - asyncio.run( + return asyncio.run( create_update_proposal( wallet_rpc_port, fingerprint, @@ -865,6 +892,7 @@ def dao_create_update_proposal_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) @@ -904,6 +932,7 @@ def dao_create_update_proposal_cmd( ) @options.create_fee() @tx_config_args +@tx_out_cmd def dao_create_mint_proposal_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -917,10 +946,11 @@ def dao_create_mint_proposal_cmd( coins_to_exclude: Sequence[bytes32], 
amounts_to_exclude: Sequence[CliAmount], reuse: Optional[bool], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .dao_funcs import create_mint_proposal - asyncio.run( + return asyncio.run( create_mint_proposal( wallet_rpc_port, fingerprint, @@ -936,6 +966,7 @@ def dao_create_mint_proposal_cmd( excluded_coin_amounts=list(amounts_to_exclude), reuse_puzhash=reuse, ), + push, ) ) diff --git a/chia/cmds/dao_funcs.py b/chia/cmds/dao_funcs.py index e81e38d32f2a..30bca709b5ca 100644 --- a/chia/cmds/dao_funcs.py +++ b/chia/cmds/dao_funcs.py @@ -4,7 +4,7 @@ import json import time from decimal import Decimal -from typing import Optional +from typing import List, Optional from chia.cmds.cmds_util import CMDTXConfigLoader, get_wallet_client, transaction_status_msg, transaction_submitted_msg from chia.cmds.param_types import CliAmount @@ -14,6 +14,7 @@ from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash from chia.util.config import selected_network_address_prefix from chia.util.ints import uint64 +from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.tx_config import DEFAULT_COIN_SELECTION_CONFIG from chia.wallet.util.wallet_types import WalletType @@ -36,10 +37,10 @@ async def add_dao_wallet( ) print("Successfully created DAO Wallet") - print(f"DAO Treasury ID: {res['treasury_id']}") - print(f"DAO Wallet ID: {res['wallet_id']}") - print(f"CAT Wallet ID: {res['cat_wallet_id']}") - print(f"DAOCAT Wallet ID: {res['dao_cat_wallet_id']}") + print(f"DAO Treasury ID: {res.treasury_id.hex()}") + print(f"DAO Wallet ID: {res.wallet_id}") + print(f"CAT Wallet ID: {res.cat_wallet_id}") + print(f"DAOCAT Wallet ID: {res.dao_cat_wallet_id}") async def create_dao_wallet( @@ -58,7 +59,8 @@ async def create_dao_wallet( filter_amount: uint64, cat_amount: CliAmount, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: if proposal_minimum % 2 == 0: proposal_minimum = uint64(1 + proposal_minimum) print("Adding 1 mojo to proposal minimum amount") @@ -89,13 +91,16 @@ async def create_dao_wallet( fee=fee, fee_for_cat=fee_for_cat, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - print("Successfully created DAO Wallet") - print(f"DAO Treasury ID: {res['treasury_id']}") - print(f"DAO Wallet ID: {res['wallet_id']}") - print(f"CAT Wallet ID: {res['cat_wallet_id']}") - print(f"DAOCAT Wallet ID: {res['dao_cat_wallet_id']}") + if push: + print("Successfully created DAO Wallet") + print(f"DAO Treasury ID: {res.treasury_id.hex()}") + print(f"DAO Wallet ID: {res.wallet_id}") + print(f"CAT Wallet ID: {res.cat_wallet_id}") + print(f"DAOCAT Wallet ID: {res.dao_cat_wallet_id}") + return res.transactions async def get_treasury_id(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: @@ -121,14 +126,15 @@ async def add_funds_to_treasury( amount: CliAmount, fee: uint64, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: typ = await get_wallet_type(wallet_id=funding_wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(typ) except LookupError: # pragma: no cover print(f"Wallet id: {wallet_id} not found.") - return + return [] res = await wallet_client.dao_add_funds_to_treasury( wallet_id=wallet_id, @@ -136,19 +142,22 @@ async def add_funds_to_treasury( amount=amount.convert_amount(mojo_per_unit), fee=fee, 
tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions - print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover + if push: + print(f"Transaction not yet submitted to nodes. TX ID: {res.tx_id.hex()}") + return res.transactions async def get_treasury_balance(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: @@ -269,7 +278,8 @@ async def vote_on_proposal( is_yes_vote: bool, fee: uint64, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_vote_on_proposal( wallet_id=wallet_id, @@ -278,18 +288,21 @@ async def vote_on_proposal( is_yes_vote=is_yes_vote, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions - print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover + if push: + print(f"Transaction not yet submitted to nodes. TX ID: {res.tx_id.hex()}") + return res.transactions async def close_proposal( @@ -300,7 +313,8 @@ async def close_proposal( proposal_id: str, self_destruct: bool, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_close_proposal( wallet_id=wallet_id, @@ -308,18 +322,22 @@ async def close_proposal( fee=fee, self_destruct=self_destruct, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None - print(f"Transaction not yet submitted to nodes. 
TX ID: {tx_id}") # pragma: no cover + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions + + if push: + print(f"Transaction not yet submitted to nodes. TX ID: {res.tx_id.hex()}") + return res.transactions async def lockup_coins( @@ -329,7 +347,8 @@ async def lockup_coins( amount: CliAmount, fee: uint64, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: final_amount: uint64 = amount.convert_amount(units["cat"]) async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_send_to_lockup( @@ -337,18 +356,22 @@ async def lockup_coins( amount=final_amount, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions + + if push: + print(f"Transaction not yet submitted to nodes. TX ID: {res.tx_id.hex()}") - print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover + return res.transactions async def release_coins( @@ -357,23 +380,28 @@ async def release_coins( wallet_id: int, fee: uint64, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_free_coins_from_finished_proposals( wallet_id=wallet_id, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None - print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions + + if push: + print(f"Transaction not yet submitted to nodes. 
TX ID: {res.tx_id.hex()}") + return res.transactions async def exit_lockup( @@ -382,24 +410,30 @@ async def exit_lockup( wallet_id: int, fee: uint64, cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_exit_lockup( wallet_id=wallet_id, coins=[], fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - tx_id = res["tx_id"] - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(bytes32.from_hexstr(tx_id)) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id[2:])) - return None - print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover + + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(res.tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, res.tx_id)) + return res.transactions + + if push: + print(f"Transaction not yet submitted to nodes. TX ID: {res.tx_id.hex()}") + return res.transactions async def create_spend_proposal( @@ -413,7 +447,8 @@ async def create_spend_proposal( asset_id: Optional[str], additions_file: Optional[str], cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: if additions_file is None and (address is None or amount is None): raise ValueError("Must include a json specification or an address / amount pair.") if additions_file: # pragma: no cover @@ -440,14 +475,15 @@ async def create_spend_proposal( vote_amount=vote_amount, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - if res["success"]: - asset_id_name = asset_id if asset_id else "XCH" - print(f"Created spend proposal for asset: {asset_id_name}") + + asset_id_name = asset_id if asset_id else "XCH" + print(f"Created spend proposal for asset: {asset_id_name}") + if push: print("Successfully created proposal.") - print(f"Proposal ID: {res['proposal_id']}") - else: # pragma: no cover - print("Failed to create proposal.") + print(f"Proposal ID: {res.proposal_id.hex()}") + return res.transactions async def create_update_proposal( @@ -463,7 +499,8 @@ async def create_update_proposal( self_destruct_length: Optional[uint64], oracle_spend_delay: Optional[uint64], cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: new_dao_rules = { "proposal_timelock": proposal_timelock, "soft_close_length": soft_close_length, @@ -480,12 +517,13 @@ async def create_update_proposal( vote_amount=vote_amount, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - if res["success"]: + + if push: print("Successfully created proposal.") - print(f"Proposal ID: {res['proposal_id']}") - else: # pragma: no cover - print("Failed to create proposal.") + print(f"Proposal ID: {res.proposal_id.hex()}") + return res.transactions async def create_mint_proposal( @@ -497,7 +535,8 @@ async def create_mint_proposal( cat_target_address: str, vote_amount: Optional[int], cli_tx_config: CMDTXConfigLoader, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await 
wallet_client.dao_create_proposal( wallet_id=wallet_id, @@ -507,9 +546,10 @@ async def create_mint_proposal( vote_amount=vote_amount, fee=fee, tx_config=cli_tx_config.to_tx_config(units["chia"], config, fingerprint), + push=push, ) - if res["success"]: + + if push: print("Successfully created proposal.") - print(f"Proposal ID: {res['proposal_id']}") - else: # pragma: no cover - print("Failed to create proposal.") + print(f"Proposal ID: {res.proposal_id.hex()}") + return res.transactions diff --git a/chia/cmds/data.py b/chia/cmds/data.py index c46c1b4c358b..426a0a092256 100644 --- a/chia/cmds/data.py +++ b/chia/cmds/data.py @@ -136,6 +136,9 @@ def create_max_page_size_option() -> Callable[[FC], FC]: ) +# Functions with this mark in this file are not being ported to @tx_out_cmd due to API peculiarities +# They will therefore not work with observer-only functionality +# NOTE: tx_endpoint (This creates wallet transactions and should be parametrized by relevant options) @data_cmd.command("create_data_store", help="Create a new data store") @create_rpc_port_option() @options.create_fee() @@ -170,6 +173,7 @@ def get_value( run(get_value_cmd(data_rpc_port, id, key_string, root_hash, fingerprint=fingerprint)) +# NOTE: tx_endpoint @data_cmd.command("update_data_store", help="Update a store by providing the changelist operations") @create_data_store_id_option() @create_changelist_option() @@ -468,6 +472,7 @@ def add_missing_files( ) +# NOTE: tx_endpoint @data_cmd.command("add_mirror", help="Publish mirror urls on chain") @create_data_store_id_option() @click.option( @@ -506,6 +511,7 @@ def add_mirror( ) +# NOTE: tx_endpoint @data_cmd.command("delete_mirror", help="Delete an owned mirror by its coin id") @click.option("-c", "--coin_id", help="Coin id", type=Bytes32ParamType(), required=True) @options.create_fee() diff --git a/chia/cmds/plotnft.py b/chia/cmds/plotnft.py index 53b59f754652..88366522d689 100644 --- a/chia/cmds/plotnft.py +++ b/chia/cmds/plotnft.py @@ -43,6 +43,9 @@ def get_login_link_cmd(launcher_id: bytes32) -> None: asyncio.run(get_login_link(launcher_id)) +# Functions with this mark in this file are not being ported to @tx_out_cmd due to lack of observer key support +# They will therefore not work with observer-only functionality +# NOTE: tx_endpoint (This creates wallet transactions and should be parametrized by relevant options) @plotnft_cmd.command("create", help="Create a plot NFT") @click.option("-y", "--yes", "dont_prompt", help="No prompts", is_flag=True) @options.create_fingerprint() @@ -82,6 +85,7 @@ def create_cmd( ) +# NOTE: tx_endpoint @plotnft_cmd.command("join", help="Join a plot NFT to a Pool") @click.option("-y", "--yes", "dont_prompt", help="No prompts", is_flag=True) @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) @@ -114,6 +118,7 @@ def join_cmd( ) +# NOTE: tx_endpoint @plotnft_cmd.command("leave", help="Leave a pool and return to self-farming") @click.option("-y", "--yes", "dont_prompt", help="No prompts", is_flag=True) @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) @@ -160,6 +165,7 @@ def inspect(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: asyncio.run(inspect_cmd(wallet_rpc_port, fingerprint, id)) +# NOTE: tx_endpoint @plotnft_cmd.command("claim", help="Claim rewards from a plot NFT") @click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True) 
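The commands in this patch are wired up through `@tx_out_cmd`, whose definition lives in `chia/cmds/cmds_util.py` and is not part of these hunks. As a rough mental model only, an assumption rather than the actual implementation, such a decorator can expose the push option and consume the `List[TransactionRecord]` each command now returns:

from functools import wraps
from typing import Any, Callable, List

import click


def tx_out_cmd_sketch(func: Callable[..., List[Any]]) -> Callable[..., None]:
    # Hypothetical: add a --push/--no-push flag and collect the returned records,
    # e.g. to serialize them for the external-signer flow in chia/cmds/signer.py.
    @wraps(func)
    def wrapper(*args: Any, push: bool = True, **kwargs: Any) -> None:
        transactions = func(*args, push=push, **kwargs)
        del transactions  # a real implementation could write these to a transaction file

    return click.option("--push/--no-push", default=True, help="Push the transaction(s) to the network")(wrapper)
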
@options.create_fingerprint() diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py new file mode 100644 index 000000000000..0227ed59df63 --- /dev/null +++ b/chia/cmds/signer.py @@ -0,0 +1,306 @@ +from __future__ import annotations + +import itertools +import os +import time +from dataclasses import replace +from functools import cached_property +from pathlib import Path +from threading import Event, Thread +from typing import List, Sequence, Type, TypeVar + +import click +from chia_rs import AugSchemeMPL, G2Element +from hsms.util.byte_chunks import create_chunks_for_blob, optimal_chunk_size_for_max_chunk_size +from segno import QRCode, make_qr + +from chia.cmds.cmd_classes import NeedsWalletRPC, chia_command, command_helper, option +from chia.cmds.cmds_util import TransactionBundle +from chia.cmds.wallet import wallet_cmd +from chia.rpc.util import ALL_TRANSLATION_LAYERS +from chia.rpc.wallet_request_types import ApplySignatures, ExecuteSigningInstructions, GatherSigningInfo +from chia.types.spend_bundle import SpendBundle +from chia.util.streamable import Streamable +from chia.wallet.signer_protocol import SignedTransaction, SigningInstructions, SigningResponse, Spend +from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.clvm_streamable import byte_deserialize_clvm_streamable, byte_serialize_clvm_streamable + + +def _clear_screen() -> None: + # Cross-platform screen clear + os.system("cls" if os.name == "nt" else "clear") + + +@wallet_cmd.group("signer", help="Get information for an external signer") +def signer_cmd() -> None: + pass # pragma: no cover + + +@command_helper +class QrCodeDisplay: + qr_density: int = option( + "--qr-density", + "-d", + type=int, + help="The maximum number of bytes contained in a single qr code", + default=100, + show_default=True, + ) + rotation_speed: int = option( + "--rotation-speed", + "-w", + type=int, + help="How many seconds delay between switching QR codes when there are multiple", + default=2, + show_default=True, + ) + + def _display_qr(self, index: int, max_index: int, code_list: List[QRCode], stop_event: Event) -> None: + while not stop_event.is_set(): + for qr_code in itertools.cycle(code_list): + _clear_screen() + qr_code.terminal(compact=True) + print(f"Displaying QR Codes ({index + 1}/{max_index})") + print("") + + for _ in range(self.rotation_speed * 100): + time.sleep(0.01) + if stop_event.is_set(): + return + + def display_qr_codes(self, blobs: List[bytes]) -> None: + chunk_sizes = [optimal_chunk_size_for_max_chunk_size(len(blob), self.qr_density) for blob in blobs] + chunks = [create_chunks_for_blob(blob, chunk_size) for blob, chunk_size in zip(blobs, chunk_sizes)] + qr_codes = [[make_qr(chunk) for chunk in chks] for chks in chunks] + + for i, qr_code_list in enumerate(qr_codes): + stop_event = Event() + t = Thread(target=self._display_qr, args=(i, len(qr_codes), qr_code_list, stop_event)) + t.start() + + try: + input("") + finally: + stop_event.set() + t.join() + stop_event.clear() + + +@command_helper +class TransactionsIn: + transaction_file_in: str = option( + "--transaction-file-in", + "-i", + type=str, + help="Transaction file to use as input", + required=True, + ) + + @cached_property + def transaction_bundle(self) -> TransactionBundle: + with open(Path(self.transaction_file_in), "rb") as file: + return TransactionBundle.from_bytes(file.read()) + + +@command_helper +class TransactionsOut: + transaction_file_out: str = option( + "--transaction-file-out", + "-o", + type=str, + help="Transaction 
filename to use as output", + required=True, + ) + + def handle_transaction_output(self, output: List[TransactionRecord]) -> None: + with open(Path(self.transaction_file_out), "wb") as file: + file.write(bytes(TransactionBundle(output))) + + +@command_helper +class _SPTranslation: + translation: str = option( + "--translation", + "-c", + type=click.Choice(["none", "CHIP-0028"]), + default="none", + help="Wallet Signer Protocol CHIP to use for translation of output", + ) + + +_T_ClvmStreamable = TypeVar("_T_ClvmStreamable", bound=Streamable) + + +@command_helper +class SPIn(_SPTranslation): + signer_protocol_input: Sequence[str] = option( + "--signer-protocol-input", + "-p", + type=str, + help="Signer protocol objects (signatures, signing instructions, etc.) as files to load as input", + multiple=True, + required=True, + ) + + def read_sp_input(self, typ: Type[_T_ClvmStreamable]) -> List[_T_ClvmStreamable]: + final_list: List[_T_ClvmStreamable] = [] + for filename in self.signer_protocol_input: # pylint: disable=not-an-iterable + with open(Path(filename), "rb") as file: + final_list.append( + byte_deserialize_clvm_streamable( + file.read(), + typ, + translation_layer=( + ALL_TRANSLATION_LAYERS[self.translation] if self.translation != "none" else None + ), + ) + ) + + return final_list + + +@command_helper +class SPOut(QrCodeDisplay, _SPTranslation): + output_format: str = option( + "--output-format", + "-t", + type=click.Choice(["hex", "file", "qr"]), + default="hex", + help="How to output the information to transfer to an external signer", + ) + output_file: Sequence[str] = option( + "--output-file", + "-b", + type=str, + multiple=True, + help="The file(s) to output to (if --output-format=file)", + ) + + def handle_clvm_output(self, outputs: List[Streamable]) -> None: + translation_layer = ALL_TRANSLATION_LAYERS[self.translation] if self.translation != "none" else None + if self.output_format == "hex": + for output in outputs: + print(byte_serialize_clvm_streamable(output, translation_layer=translation_layer).hex()) + if self.output_format == "file": + if len(self.output_file) == 0: + print("--output-format=file specifed without any --output-file") + return + elif len(self.output_file) != len(outputs): + print( + "Incorrect number of file outputs specified, " + f"expected: {len(outputs)} got {len(self.output_file)}" + ) + return + else: + for filename, output in zip(self.output_file, outputs): + with open(Path(filename), "wb") as file: + file.write(byte_serialize_clvm_streamable(output, translation_layer=translation_layer)) + if self.output_format == "qr": + self.display_qr_codes( + [byte_serialize_clvm_streamable(output, translation_layer=translation_layer) for output in outputs] + ) + + +@chia_command( + signer_cmd, + "gather_signing_info", + "Gather the information from a transaction that a signer needs in order to create a signature", +) +class GatherSigningInfoCMD: + sp_out: SPOut + txs_in: TransactionsIn + rpc_info: NeedsWalletRPC + + async def run(self) -> None: + async with self.rpc_info.wallet_rpc() as wallet_rpc: + spends: List[Spend] = [ + Spend.from_coin_spend(cs) + for tx in self.txs_in.transaction_bundle.txs + if tx.spend_bundle is not None + for cs in tx.spend_bundle.coin_spends + ] + signing_instructions: SigningInstructions = ( + await wallet_rpc.client.gather_signing_info(GatherSigningInfo(spends=spends)) + ).signing_instructions + self.sp_out.handle_clvm_output([signing_instructions]) + + +@chia_command(signer_cmd, "apply_signatures", "Apply a signer's signatures to a 
transaction bundle") +class ApplySignaturesCMD: + txs_out: TransactionsOut + sp_in: SPIn + txs_in: TransactionsIn + rpc_info: NeedsWalletRPC + + async def run(self) -> None: + async with self.rpc_info.wallet_rpc() as wallet_rpc: + signing_responses: List[SigningResponse] = self.sp_in.read_sp_input(SigningResponse) + spends: List[Spend] = [ + Spend.from_coin_spend(cs) + for tx in self.txs_in.transaction_bundle.txs + if tx.spend_bundle is not None + for cs in tx.spend_bundle.coin_spends + ] + signed_transactions: List[SignedTransaction] = ( + await wallet_rpc.client.apply_signatures( + ApplySignatures(spends=spends, signing_responses=signing_responses) + ) + ).signed_transactions + signed_spends: List[Spend] = [spend for tx in signed_transactions for spend in tx.transaction_info.spends] + final_signature: G2Element = G2Element() + for signature in [sig for tx in signed_transactions for sig in tx.signatures]: + if signature.type != "bls_12381_aug_scheme": # pragma: no cover + print("No external spot for non BLS signatures in a spend") + return + final_signature = AugSchemeMPL.aggregate([final_signature, G2Element.from_bytes(signature.signature)]) + new_spend_bundle: SpendBundle = SpendBundle( + [spend.as_coin_spend() for spend in signed_spends], final_signature + ) + new_transactions: List[TransactionRecord] = [ + replace( + self.txs_in.transaction_bundle.txs[0], spend_bundle=new_spend_bundle, name=new_spend_bundle.name() + ), + *(replace(tx, spend_bundle=None) for tx in self.txs_in.transaction_bundle.txs[1:]), + ] + self.txs_out.handle_transaction_output(new_transactions) + + +@chia_command(signer_cmd, "execute_signing_instructions", "Given some signing instructions, return signing responses") +class ExecuteSigningInstructionsCMD: + sp_out: SPOut + sp_in: SPIn + rpc_info: NeedsWalletRPC + + async def run(self) -> None: + async with self.rpc_info.wallet_rpc() as wallet_rpc: + signing_instructions: List[SigningInstructions] = self.sp_in.read_sp_input(SigningInstructions) + self.sp_out.handle_clvm_output( + [ + signing_response + for instruction_set in signing_instructions + for signing_response in ( + await wallet_rpc.client.execute_signing_instructions( + ExecuteSigningInstructions(signing_instructions=instruction_set, partial_allowed=True) + ) + ).signing_responses + ] + ) + + +@chia_command(wallet_cmd, "push_transactions", "Push a transaction bundle to the wallet to send to the network") +class PushTransactionsCMD: + txs_in: TransactionsIn + rpc_info: NeedsWalletRPC + + async def run(self) -> None: + async with self.rpc_info.wallet_rpc() as wallet_rpc: + await wallet_rpc.client.push_transactions(self.txs_in.transaction_bundle.txs) + + +# Uncomment this for testing of qr code display +# @chia_command(signer_cmd, "temp", "") +# class Temp: +# qr: QrCodeDisplay +# +# def run(self) -> None: +# self.qr.display_qr_codes([bytes([1] * 200), bytes([2] * 200)]) diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index f9986a5254e4..d10a6a0d396e 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -8,6 +8,7 @@ from chia.cmds import options from chia.cmds.check_wallet_db import help_text as check_help_text +from chia.cmds.cmds_util import tx_out_cmd from chia.cmds.coins import coins_cmd from chia.cmds.param_types import ( AddressParamType, @@ -19,6 +20,7 @@ ) from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint32, uint64 +from chia.wallet.transaction_record import TransactionRecord from chia.wallet.transaction_sorting import SortKey from 
chia.wallet.util.address_type import AddressType from chia.wallet.util.wallet_types import WalletType @@ -194,6 +196,7 @@ def get_transactions_cmd( type=int, default=0, ) +@tx_out_cmd def send_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -208,10 +211,11 @@ def send_cmd( coins_to_exclude: Sequence[bytes32], reuse: bool, clawback_time: int, -) -> None: # pragma: no cover + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import send - asyncio.run( + return asyncio.run( send( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, @@ -226,6 +230,7 @@ def send_cmd( excluded_coin_ids=coins_to_exclude, reuse_puzhash=True if reuse else None, clawback_time_lock=clawback_time, + push=push, ) ) @@ -308,13 +313,16 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int, n is_flag=True, default=False, ) +@tx_out_cmd def clawback( - wallet_rpc_port: Optional[int], id: int, fingerprint: int, tx_ids: str, fee: uint64, force: bool -) -> None: # pragma: no cover + wallet_rpc_port: Optional[int], id: int, fingerprint: int, tx_ids: str, fee: uint64, force: bool, push: bool +) -> List[TransactionRecord]: from .wallet_funcs import spend_clawback - asyncio.run( - spend_clawback(wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=fee, tx_ids_str=tx_ids, force=force) + return asyncio.run( + spend_clawback( + wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=fee, tx_ids_str=tx_ids, force=force, push=push + ) ) @@ -461,6 +469,8 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: bytes32, token_name: default=False, ) @click.option("--override", help="Creates offer without checking for unusual values", is_flag=True, default=False) +# This command looks like a good candidate for @tx_out_cmd however, pushing an incomplete tx is nonsensical and +# we already have a canonical offer file format which the idea of exporting a different transaction conflicts with def make_offer_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -557,6 +567,7 @@ def get_offers_cmd( is_flag=True, default=False, ) +@tx_out_cmd def take_offer_cmd( path_or_hex: str, wallet_rpc_port: Optional[int], @@ -564,10 +575,11 @@ def take_offer_cmd( examine_only: bool, fee: uint64, reuse: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import take_offer - asyncio.run(take_offer(wallet_rpc_port, fingerprint, fee, path_or_hex, examine_only)) + return asyncio.run(take_offer(wallet_rpc_port, fingerprint, fee, path_or_hex, examine_only, push)) @wallet_cmd.command("cancel_offer", help="Cancel an existing offer") @@ -582,12 +594,13 @@ def take_offer_cmd( @click.option("-id", "--id", help="The offer ID that you wish to cancel", required=True, type=Bytes32ParamType()) @click.option("--insecure", help="Don't make an on-chain transaction, simply mark the offer as cancelled", is_flag=True) @options.create_fee("The fee to use when cancelling the offer securely, in XCH") +@tx_out_cmd def cancel_offer_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, id: bytes32, insecure: bool, fee: uint64 -) -> None: + wallet_rpc_port: Optional[int], fingerprint: int, id: bytes32, insecure: bool, fee: uint64, push: bool +) -> List[TransactionRecord]: from .wallet_funcs import cancel_offer - asyncio.run(cancel_offer(wallet_rpc_port, fingerprint, fee, id, not insecure)) + return asyncio.run(cancel_offer(wallet_rpc_port, fingerprint, fee, id, not insecure, push)) @wallet_cmd.command("check", short_help="Check wallet DB integrity", help=check_help_text) @@ -628,12 +641,13 @@ def 
did_cmd() -> None: show_default=True, ) @options.create_fee() +@tx_out_cmd def did_create_wallet_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, name: Optional[str], amount: int, fee: uint64 -) -> None: + wallet_rpc_port: Optional[int], fingerprint: int, name: Optional[str], amount: int, fee: uint64, push: bool +) -> List[TransactionRecord]: from .wallet_funcs import create_did_wallet - asyncio.run(create_did_wallet(wallet_rpc_port, fingerprint, fee, name, amount)) + return asyncio.run(create_did_wallet(wallet_rpc_port, fingerprint, fee, name, amount, push)) @did_cmd.command("sign_message", help="Sign a message by a DID") @@ -728,12 +742,13 @@ def did_get_details_cmd(wallet_rpc_port: Optional[int], fingerprint: int, coin_i is_flag=True, default=False, ) +@tx_out_cmd def did_update_metadata_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, id: int, metadata: str, reuse: bool -) -> None: + wallet_rpc_port: Optional[int], fingerprint: int, id: int, metadata: str, reuse: bool, push: bool +) -> List[TransactionRecord]: from .wallet_funcs import update_did_metadata - asyncio.run(update_did_metadata(wallet_rpc_port, fingerprint, id, metadata, reuse)) + return asyncio.run(update_did_metadata(wallet_rpc_port, fingerprint, id, metadata, reuse, push=push)) @did_cmd.command("find_lost", help="Find the did you should own and recovery the DID wallet") @@ -808,13 +823,15 @@ def did_find_lost_cmd( type=str, required=False, ) +@tx_out_cmd def did_message_spend_cmd( wallet_rpc_port: Optional[int], fingerprint: int, id: int, puzzle_announcements: Optional[str], coin_announcements: Optional[str], -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import did_message_spend puzzle_list: List[str] = [] @@ -827,7 +844,7 @@ def did_message_spend_cmd( bytes.fromhex(announcement) except ValueError: print("Invalid puzzle announcement format, should be a list of hex strings.") - return + return [] if coin_announcements is not None: try: coin_list = coin_announcements.split(",") @@ -836,9 +853,9 @@ def did_message_spend_cmd( bytes.fromhex(announcement) except ValueError: print("Invalid coin announcement format, should be a list of hex strings.") - return + return [] - asyncio.run(did_message_spend(wallet_rpc_port, fingerprint, id, puzzle_list, coin_list)) + return asyncio.run(did_message_spend(wallet_rpc_port, fingerprint, id, puzzle_list, coin_list, push=push)) @did_cmd.command("transfer", help="Transfer a DID") @@ -863,6 +880,7 @@ def did_message_spend_cmd( is_flag=True, default=False, ) +@tx_out_cmd def did_transfer_did( wallet_rpc_port: Optional[int], fingerprint: int, @@ -871,10 +889,11 @@ def did_transfer_did( reset_recovery: bool, fee: uint64, reuse: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import transfer_did - asyncio.run( + return asyncio.run( transfer_did( wallet_rpc_port, fingerprint, @@ -883,6 +902,7 @@ def did_transfer_did( target_address, reset_recovery is False, True if reuse else None, + push=push, ) ) @@ -973,6 +993,7 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: C is_flag=True, default=False, ) +@tx_out_cmd def nft_mint_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -991,7 +1012,8 @@ def nft_mint_cmd( fee: uint64, royalty_percentage_fraction: int, reuse: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import mint_nft if metadata_uris is None: @@ -1004,7 +1026,7 @@ def nft_mint_cmd( else: license_uris_list = [lu.strip() for lu in 
license_uris.split(",")] - asyncio.run( + return asyncio.run( mint_nft( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, @@ -1023,6 +1045,7 @@ def nft_mint_cmd( fee=fee, royalty_percentage=royalty_percentage_fraction, reuse_puzhash=True if reuse else None, + push=push, ) ) @@ -1049,6 +1072,7 @@ def nft_mint_cmd( is_flag=True, default=False, ) +@tx_out_cmd def nft_add_uri_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1059,10 +1083,11 @@ def nft_add_uri_cmd( license_uri: str, fee: uint64, reuse: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import add_uri_to_nft - asyncio.run( + return asyncio.run( add_uri_to_nft( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, @@ -1073,6 +1098,7 @@ def nft_add_uri_cmd( metadata_uri=metadata_uri, license_uri=license_uri, reuse_puzhash=True if reuse else None, + push=push, ) ) @@ -1097,6 +1123,7 @@ def nft_add_uri_cmd( is_flag=True, default=False, ) +@tx_out_cmd def nft_transfer_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1105,10 +1132,11 @@ def nft_transfer_cmd( target_address: CliAddress, fee: uint64, reuse: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import transfer_nft - asyncio.run( + return asyncio.run( transfer_nft( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, @@ -1117,6 +1145,7 @@ def nft_transfer_cmd( nft_coin_id=nft_coin_id, target_cli_address=target_address, reuse_puzhash=True if reuse else None, + push=push, ) ) @@ -1236,6 +1265,7 @@ def notification_cmd() -> None: ) @click.option("-n", "--message", help="The message of the notification", type=str) @options.create_fee() +@tx_out_cmd def send_notification_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1243,11 +1273,12 @@ def send_notification_cmd( amount: CliAmount, message: str, fee: uint64, -) -> None: + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import send_notification message_bytes: bytes = bytes(message, "utf8") - asyncio.run(send_notification(wallet_rpc_port, fingerprint, fee, to_address, message_bytes, amount)) + return asyncio.run(send_notification(wallet_rpc_port, fingerprint, fee, to_address, message_bytes, amount, push)) @notification_cmd.command("get", help="Get notification(s) that are in your wallet") @@ -1319,16 +1350,18 @@ def vcs_cmd() -> None: # pragma: no cover required=False, ) @options.create_fee("Blockchain fee for mint transaction, in XCH") +@tx_out_cmd def mint_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, did: CliAddress, target_address: Optional[CliAddress], fee: uint64, -) -> None: # pragma: no cover + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import mint_vc - asyncio.run(mint_vc(wallet_rpc_port, fingerprint, did, fee, target_address)) + return asyncio.run(mint_vc(wallet_rpc_port, fingerprint, did, fee, target_address, push)) @vcs_cmd.command("get", short_help="Get a list of existing VCs") @@ -1388,6 +1421,7 @@ def get_vcs_cmd( default=False, show_default=True, ) +@tx_out_cmd def spend_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1396,10 +1430,11 @@ def spend_vc_cmd( new_proof_hash: str, fee: uint64, reuse_puzhash: bool, -) -> None: # pragma: no cover + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import spend_vc - asyncio.run( + return asyncio.run( spend_vc( wallet_rpc_port=wallet_rpc_port, fp=fingerprint, @@ -1408,6 +1443,7 @@ def spend_vc_cmd( new_puzhash=new_puzhash, new_proof_hash=new_proof_hash, reuse_puzhash=reuse_puzhash, + push=push, ) ) @@ -1484,6 +1520,7 @@ 
def get_proofs_for_root_cmd( default=False, show_default=True, ) +@tx_out_cmd def revoke_vc_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1491,10 +1528,11 @@ def revoke_vc_cmd( vc_id: Optional[bytes32], fee: uint64, reuse_puzhash: bool, -) -> None: # pragma: no cover + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import revoke_vc - asyncio.run(revoke_vc(wallet_rpc_port, fingerprint, parent_coin_id, vc_id, fee, reuse_puzhash)) + return asyncio.run(revoke_vc(wallet_rpc_port, fingerprint, parent_coin_id, vc_id, fee, reuse_puzhash, push)) @vcs_cmd.command("approve_r_cats", help="Claim any R-CATs that are currently pending VC approval") @@ -1531,6 +1569,7 @@ def revoke_vc_cmd( is_flag=True, default=False, ) +@tx_out_cmd def approve_r_cats_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -1540,11 +1579,20 @@ def approve_r_cats_cmd( min_coin_amount: CliAmount, max_coin_amount: CliAmount, reuse: bool, -) -> None: # pragma: no cover + push: bool, +) -> List[TransactionRecord]: from .wallet_funcs import approve_r_cats - asyncio.run( + return asyncio.run( approve_r_cats( - wallet_rpc_port, fingerprint, uint32(id), min_amount_to_claim, fee, min_coin_amount, max_coin_amount, reuse + wallet_rpc_port, + fingerprint, + uint32(id), + min_amount_to_claim, + fee, + min_coin_amount, + max_coin_amount, + reuse, + push, ) ) diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 2be8dcce75f9..f7fd129b37c5 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -20,7 +20,7 @@ from chia.cmds.param_types import CliAddress, CliAmount from chia.cmds.peer_funcs import print_connections from chia.cmds.units import units -from chia.rpc.wallet_request_types import GetNotifications +from chia.rpc.wallet_request_types import CATSpendResponse, GetNotifications, SendTransactionResponse from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import bech32_decode, decode_puzzle_hash, encode_puzzle_hash @@ -273,7 +273,8 @@ async def send( excluded_coin_ids: Sequence[bytes32], reuse_puzhash: Optional[bool], clawback_time_lock: int, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if memo is None: memos = None @@ -282,13 +283,13 @@ async def send( if clawback_time_lock < 0: print("Clawback time lock seconds cannot be negative.") - return + return [] try: typ = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(typ) except LookupError: print(f"Wallet id: {wallet_id} not found.") - return + return [] final_amount: uint64 = amount.convert_amount(mojo_per_unit) @@ -297,14 +298,14 @@ async def send( f"A transaction of amount {final_amount / units['chia']} and fee {fee} is unusual.\n" f"Pass in --override if you are sure you mean to do this." 
) - return + return [] if final_amount == 0: print("You can not send an empty transaction") - return + return [] if typ == WalletType.STANDARD_WALLET: print("Submitting transaction...") - res = await wallet_client.send_transaction( + res: Union[CATSpendResponse, SendTransactionResponse] = await wallet_client.send_transaction( wallet_id, final_amount, address.original_address, @@ -321,6 +322,7 @@ async def send( if clawback_time_lock > 0 else None ), + push=push, ) elif typ in {WalletType.CAT, WalletType.CRCAT}: print("Submitting transaction...") @@ -336,23 +338,28 @@ async def send( address.original_address, fee, memos, + push=push, ) else: print("Only standard wallet and CAT wallets are supported") - return - - tx_id = res.name - start = time.time() - while time.time() - start < 10: - await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(tx_id) - if len(tx.sent_to) > 0: - print(transaction_submitted_msg(tx)) - print(transaction_status_msg(fingerprint, tx_id)) - return None + return [] + + tx_id = res.transaction.name + if push: + start = time.time() + while time.time() - start < 10: + await asyncio.sleep(0.1) + tx = await wallet_client.get_transaction(tx_id) + if len(tx.sent_to) > 0: + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, tx_id)) + return res.transactions print("Transaction not yet submitted to nodes") - print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") + if push: # pragma: no cover + print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}") + + return res.transactions # pragma: no cover async def get_address(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, new_address: bool) -> None: @@ -538,7 +545,7 @@ async def make_offer( cli_confirm("Confirm (y/n): ", "Not creating offer...") with filepath.open(mode="w") as file: - offer, trade_record = await wallet_client.create_offer_for_ids( + res = await wallet_client.create_offer_for_ids( offer_dict, driver_dict=driver_dict, fee=fee, @@ -546,12 +553,12 @@ async def make_offer( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), ) - if offer is not None: - file.write(offer.to_bech32()) - print(f"Created offer with ID {trade_record.trade_id}") + if res.offer is not None: + file.write(res.offer.to_bech32()) + print(f"Created offer with ID {res.trade_record.trade_id}") print( f"Use chia wallet get_offers --id " - f"{trade_record.trade_id} -f {fingerprint} to view status" + f"{res.trade_record.trade_id} -f {fingerprint} to view status" ) else: print("Error creating offer") @@ -691,7 +698,8 @@ async def take_offer( fee: uint64, file: str, examine_only: bool, -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if os.path.exists(file): filepath = pathlib.Path(file) @@ -705,7 +713,7 @@ async def take_offer( offer = Offer.from_bech32(offer_hex) except ValueError: print("Please enter a valid offer file or hex blob") - return + return [] offered, requested, _, _ = offer.summary() cat_name_resolver = wallet_client.cat_asset_id_to_name @@ -770,13 +778,21 @@ async def take_offer( if not examine_only: print() cli_confirm("Would you like to take this offer? 
(y/n): ") - trade_record = await wallet_client.take_offer( + res = await wallet_client.take_offer( offer, fee=fee, tx_config=CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), + push=push, ) - print(f"Accepted offer with ID {trade_record.trade_id}") - print(f"Use chia wallet get_offers --id {trade_record.trade_id} -f {fingerprint} to view its status") + if push: + print(f"Accepted offer with ID {res.trade_record.trade_id}") + print( + f"Use chia wallet get_offers --id {res.trade_record.trade_id} -f {fingerprint} to view its status" + ) + + return res.transactions + else: + return [] async def cancel_offer( @@ -785,19 +801,27 @@ async def cancel_offer( fee: uint64, offer_id: bytes32, secure: bool, -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): trade_record = await wallet_client.get_offer(offer_id, file_contents=True) await print_trade_record(trade_record, wallet_client, summaries=True) cli_confirm(f"Are you sure you wish to cancel offer with ID: {trade_record.trade_id}? (y/n): ") - await wallet_client.cancel_offer( - offer_id, CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), secure=secure, fee=fee + res = await wallet_client.cancel_offer( + offer_id, + CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), + secure=secure, + fee=fee, + push=push, ) - print(f"Cancelled offer with ID {trade_record.trade_id}") - if secure: + if push or not secure: + print(f"Cancelled offer with ID {trade_record.trade_id}") + if secure and push: print(f"Use chia wallet get_offers --id {trade_record.trade_id} -f {fingerprint} to view cancel status") + return res.transactions + def wallet_coin_unit(typ: WalletType, address_prefix: str) -> Tuple[str, int]: if typ in {WalletType.CAT, WalletType.CRCAT}: @@ -898,19 +922,21 @@ async def print_balances( async def create_did_wallet( - wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, name: Optional[str], amount: int -) -> None: + wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, name: Optional[str], amount: int, push: bool +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.create_new_did_wallet( - amount, CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), fee, name + amount, CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), fee, name, push=push ) wallet_id = response["wallet_id"] my_did = response["my_did"] print(f"Successfully created a DID wallet with name {name} and id {wallet_id} on key {fingerprint}") print(f"Successfully created a DID {my_did} in the newly created DID wallet") + return [] # TODO: fix this endpoint to return transactions except Exception as e: print(f"Failed to create DID wallet: {e}") + return [] async def did_set_wallet_name(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, name: str) -> None: @@ -961,7 +987,8 @@ async def update_did_metadata( did_wallet_id: int, metadata: str, reuse_puzhash: bool, -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.update_did_metadata( @@ -971,11 +998,15 @@ async def update_did_metadata( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), ) - print( - f"Successfully updated DID wallet ID: 
{response['wallet_id']}, Spend Bundle: {response['spend_bundle']}" - ) + if push: + print( + f"Successfully updated DID wallet ID: {response.wallet_id}, " + f"Spend Bundle: {response.spend_bundle.to_json_dict()}" + ) + return response.transactions except Exception as e: print(f"Failed to update DID metadata: {e}") + return [] async def did_message_spend( @@ -984,7 +1015,8 @@ async def did_message_spend( did_wallet_id: int, puzzle_announcements: List[str], coin_announcements: List[str], -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.did_message_spend( @@ -994,10 +1026,13 @@ async def did_message_spend( *(CreateCoinAnnouncement(hexstr_to_bytes(ca)) for ca in coin_announcements), *(CreatePuzzleAnnouncement(hexstr_to_bytes(pa)) for pa in puzzle_announcements), ), + push=push, ) - print(f"Message Spend Bundle: {response['spend_bundle']}") + print(f"Message Spend Bundle: {response.spend_bundle.to_json_dict()}") + return response.transactions except Exception as e: print(f"Failed to update DID metadata: {e}") + return [] async def transfer_did( @@ -1008,7 +1043,9 @@ async def transfer_did( target_cli_address: CliAddress, with_recovery: bool, reuse_puzhash: Optional[bool], -) -> None: + push: bool = True, +) -> List[TransactionRecord]: + async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: target_address = target_cli_address.original_address @@ -1020,12 +1057,16 @@ async def transfer_did( tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), + push=push, ) - print(f"Successfully transferred DID to {target_address}") - print(f"Transaction ID: {response['transaction_id']}") - print(f"Transaction: {response['transaction']}") + if push: + print(f"Successfully transferred DID to {target_address}") + print(f"Transaction ID: {response.transaction_id.hex()}") + print(f"Transaction: {response.transaction.to_json_dict_convenience(config)}") + return response.transactions except Exception as e: print(f"Failed to transfer DID: {e}") + return [] async def find_lost_did( @@ -1084,7 +1125,8 @@ async def mint_nft( fee: uint64, royalty_percentage: int, reuse_puzhash: Optional[bool], -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): royalty_address = royalty_cli_address.validate_address_type(AddressType.XCH) if royalty_cli_address else None target_address = target_cli_address.validate_address_type(AddressType.XCH) if target_cli_address else None @@ -1103,7 +1145,7 @@ async def mint_nft( if not wallet_has_did: did_id = "" - response = await wallet_client.mint_nft( + mint_response = await wallet_client.mint_nft( wallet_id, royalty_address, target_address, @@ -1121,11 +1163,15 @@ async def mint_nft( fee, royalty_percentage, did_id, + push=push, ) - spend_bundle = response["spend_bundle"] - print(f"NFT minted Successfully with spend bundle: {spend_bundle}") + spend_bundle = mint_response.spend_bundle + if push: + print(f"NFT minted Successfully with spend bundle: {spend_bundle}") + return mint_response.transactions except Exception as e: print(f"Failed to mint NFT: {e}") + return [] async def add_uri_to_nft( @@ -1139,7 +1185,8 @@ async def add_uri_to_nft( metadata_uri: Optional[str], license_uri: Optional[str], reuse_puzhash: Optional[bool], -) -> None: + push: bool = True, +) 
-> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: if len([x for x in (uri, metadata_uri, license_uri) if x is not None]) > 1: @@ -1164,11 +1211,15 @@ async def add_uri_to_nft( tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), + push=push, ) - spend_bundle = response["spend_bundle"] - print(f"URI added successfully with spend bundle: {spend_bundle}") + spend_bundle = response.spend_bundle.to_json_dict() + if push: + print(f"URI added successfully with spend bundle: {spend_bundle}") + return response.transactions except Exception as e: print(f"Failed to add URI to NFT: {e}") + return [] async def transfer_nft( @@ -1180,7 +1231,8 @@ async def transfer_nft( nft_coin_id: str, target_cli_address: CliAddress, reuse_puzhash: Optional[bool], -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: target_address = target_cli_address.validate_address_type(AddressType.XCH) @@ -1192,11 +1244,16 @@ async def transfer_nft( tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), + push=push, ) - spend_bundle = response["spend_bundle"] - print(f"NFT transferred successfully with spend bundle: {spend_bundle}") + spend_bundle = response.spend_bundle.to_json_dict() + if push: + print("NFT transferred successfully") + print(f"spend bundle: {spend_bundle}") + return response.transactions except Exception as e: print(f"Failed to transfer NFT: {e}") + return [] def print_nft_info(nft: NFTInfo, *, config: Dict[str, Any]) -> None: @@ -1274,7 +1331,7 @@ async def set_nft_did( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), ) - spend_bundle = response["spend_bundle"] + spend_bundle = response.spend_bundle.to_json_dict() print(f"Transaction to set DID on NFT has been initiated with: {spend_bundle}") except Exception as e: print(f"Failed to set DID on NFT: {e}") @@ -1354,14 +1411,17 @@ async def send_notification( address: CliAddress, message: bytes, cli_amount: CliAmount, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): amount: uint64 = cli_amount.convert_amount(units["chia"]) - tx = await wallet_client.send_notification(address.puzzle_hash, message, amount, fee) + tx = await wallet_client.send_notification(address.puzzle_hash, message, amount, fee, push=push) - print("Notification sent successfully.") - print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx.name}") + if push: + print("Notification sent successfully.") + print(f"To get status, use command: chia wallet get_transaction -f {fingerprint} -tx 0x{tx.name}") + return [tx] async def get_notifications( @@ -1433,20 +1493,27 @@ async def sign_message( async def spend_clawback( - *, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, tx_ids_str: str, force: bool = False -) -> None: + *, + wallet_rpc_port: Optional[int], + fp: Optional[int], + fee: uint64, + tx_ids_str: str, + force: bool = False, + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): tx_ids = [] for tid in tx_ids_str.split(","): tx_ids.append(bytes32.from_hexstr(tid)) if len(tx_ids) == 0: print("Transaction ID is required.") - return + return [] if fee < 0: 
print("Batch fee cannot be negative.") - return - response = await wallet_client.spend_clawback_coins(tx_ids, fee, force) + return [] + response = await wallet_client.spend_clawback_coins(tx_ids, fee, force, push=push) print(str(response)) + return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]] async def mint_vc( @@ -1455,19 +1522,22 @@ async def mint_vc( did: CliAddress, fee: uint64, target_address: Optional[CliAddress], -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - vc_record, txs = await wallet_client.vc_mint( + res = await wallet_client.vc_mint( did.validate_address_type_get_ph(AddressType.DID), CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), target_address.validate_address_type_get_ph(AddressType.XCH) if target_address else None, fee, + push=push, ) - print(f"New VC with launcher ID minted: {vc_record.vc.launcher_id}") + if push: + print(f"New VC with launcher ID minted: {res.vc_record.vc.launcher_id.hex()}") print("Relevant TX records:") print("") - for tx in txs: + for tx in res.transactions: print_transaction( tx, verbose=False, @@ -1475,6 +1545,7 @@ async def mint_vc( address_prefix=selected_network_address_prefix(config), mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET), ) + return res.transactions async def get_vcs(wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int) -> None: @@ -1509,19 +1580,24 @@ async def spend_vc( new_puzhash: Optional[bytes32], new_proof_hash: str, reuse_puzhash: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - txs = await wallet_client.vc_spend( - vc_id, - new_puzhash=new_puzhash, - new_proof_hash=bytes32.from_hexstr(new_proof_hash), - fee=fee, - tx_config=CMDTXConfigLoader( - reuse_puzhash=reuse_puzhash, - ).to_tx_config(units["chia"], config, fingerprint), - ) + txs = ( + await wallet_client.vc_spend( + vc_id, + new_puzhash=new_puzhash, + new_proof_hash=bytes32.from_hexstr(new_proof_hash), + fee=fee, + tx_config=CMDTXConfigLoader( + reuse_puzhash=reuse_puzhash, + ).to_tx_config(units["chia"], config, fingerprint), + push=push, + ) + ).transactions - print("Proofs successfully updated!") + if push: + print("Proofs successfully updated!") print("Relevant TX records:") print("") for tx in txs: @@ -1532,6 +1608,7 @@ async def spend_vc( address_prefix=selected_network_address_prefix(config), mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET), ) + return txs async def add_proof_reveal( @@ -1567,28 +1644,33 @@ async def revoke_vc( vc_id: Optional[bytes32], fee: uint64, reuse_puzhash: bool, -) -> None: + push: bool, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if parent_coin_id is None: if vc_id is None: print("Must specify either --parent-coin-id or --vc-id") - return + return [] record = await wallet_client.vc_get(vc_id) if record is None: print(f"Cannot find a VC with ID {vc_id.hex()}") - return + return [] parent_id: bytes32 = bytes32(record.vc.coin.parent_coin_info) else: parent_id = parent_coin_id - txs = await wallet_client.vc_revoke( - parent_id, - fee=fee, - tx_config=CMDTXConfigLoader( - reuse_puzhash=reuse_puzhash, - ).to_tx_config(units["chia"], config, fingerprint), - ) + txs = ( + await wallet_client.vc_revoke( + parent_id, + fee=fee, + 
tx_config=CMDTXConfigLoader( + reuse_puzhash=reuse_puzhash, + ).to_tx_config(units["chia"], config, fingerprint), + push=push, + ) + ).transactions - print("VC successfully revoked!") + if push: + print("VC successfully revoked!") print("Relevant TX records:") print("") for tx in txs: @@ -1599,6 +1681,7 @@ async def revoke_vc( address_prefix=selected_network_address_prefix(config), mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET), ) + return txs async def approve_r_cats( @@ -1610,7 +1693,8 @@ async def approve_r_cats( min_coin_amount: CliAmount, max_coin_amount: CliAmount, reuse: bool, -) -> None: + push: bool = True, +) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): if wallet_client is None: return @@ -1623,9 +1707,11 @@ async def approve_r_cats( max_coin_amount=max_coin_amount, reuse_puzhash=reuse, ).to_tx_config(units["cat"], config, fingerprint), + push=push, ) - print("VC successfully approved R-CATs!") + if push: + print("VC successfully approved R-CATs!") print("Relevant TX records:") print("") for tx in txs: @@ -1640,7 +1726,7 @@ async def approve_r_cats( ) except LookupError as e: print(e.args[0]) - return + return txs print_transaction( tx, @@ -1649,3 +1735,4 @@ async def approve_r_cats( address_prefix=selected_network_address_prefix(config), mojo_per_unit=mojo_per_unit, ) + return txs diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py index d5665b1f0d9c..8dcc940099b4 100644 --- a/chia/data_layer/data_layer.py +++ b/chia/data_layer/data_layer.py @@ -1026,7 +1026,7 @@ async def make_offer( for our_offer_store in maker } - wallet_offer, trade_record = await self.wallet_rpc.create_offer_for_ids( + res = await self.wallet_rpc.create_offer_for_ids( offer_dict=offer_dict, solver=solver, driver_dict={}, @@ -1036,12 +1036,10 @@ async def make_offer( # This is not a change in behavior, the default was already implicit. tx_config=DEFAULT_TX_CONFIG, ) - if wallet_offer is None: - raise Exception("offer is None despite validate_only=False") offer = Offer( - trade_id=trade_record.trade_id, - offer=bytes(wallet_offer), + trade_id=res.trade_record.trade_id, + offer=bytes(res.offer), taker=taker, maker=tuple(our_store_proofs.values()), ) @@ -1118,14 +1116,16 @@ async def take_offer( # after the transaction is submitted to the chain. If we roll back data we # may lose published data. - trade_record = await self.wallet_rpc.take_offer( - offer=offer, - solver=solver, - fee=fee, - # TODO: probably shouldn't be default but due to peculiarities in the RPC, we're using a stop gap. - # This is not a change in behavior, the default was already implicit. - tx_config=DEFAULT_TX_CONFIG, - ) + trade_record = ( + await self.wallet_rpc.take_offer( + offer=offer, + solver=solver, + fee=fee, + # TODO: probably shouldn't be default but due to peculiarities in the RPC, we're using a stop gap. + # This is not a change in behavior, the default was already implicit. 
+ tx_config=DEFAULT_TX_CONFIG, + ) + ).trade_record return trade_record diff --git a/chia/rpc/util.py b/chia/rpc/util.py index 2d6d2d2a4974..1d8394971d2a 100644 --- a/chia/rpc/util.py +++ b/chia/rpc/util.py @@ -17,8 +17,9 @@ from chia.wallet.trade_record import TradeRecord from chia.wallet.trading.offer import Offer from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.blind_signer_tl import BLIND_SIGNER_TRANSLATION from chia.wallet.util.clvm_streamable import ( - byte_serialize_clvm_streamable, + TranslationLayer, json_deserialize_with_clvm_streamable, json_serialize_with_clvm_streamable, ) @@ -34,6 +35,9 @@ MarshallableRpcEndpoint = Callable[..., Awaitable[Streamable]] +ALL_TRANSLATION_LAYERS: Dict[str, TranslationLayer] = {"CHIP-0028": BLIND_SIGNER_TRANSLATION} + + def marshal(func: MarshallableRpcEndpoint) -> RpcEndpoint: hints = get_type_hints(func) request_hint = hints["request"] @@ -46,7 +50,13 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args: object, **kwargs: o ( request_class.from_json_dict(request) if not request.get("CHIP-0029", False) - else json_deserialize_with_clvm_streamable(request, request_hint) + else json_deserialize_with_clvm_streamable( + request, + request_hint, + translation_layer=( + ALL_TRANSLATION_LAYERS[request["translation"]] if "translation" in request else None + ), + ) ), *args, **kwargs, @@ -54,7 +64,12 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args: object, **kwargs: o if not request.get("CHIP-0029", False): return response_obj.to_json_dict() else: - response_dict = json_serialize_with_clvm_streamable(response_obj) + response_dict = json_serialize_with_clvm_streamable( + response_obj, + translation_layer=( + ALL_TRANSLATION_LAYERS[request["translation"]] if "translation" in request else None + ), + ) if isinstance(response_dict, str): # pragma: no cover raise ValueError("Internal Error. 
Marshalled endpoint was made with clvm_streamable.") return response_dict @@ -156,17 +171,36 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[s ] unsigned_txs = await self.service.wallet_state_manager.gather_signing_info_for_txs(tx_records) - if not request.get("CHIP-0029", False): - response["unsigned_transactions"] = [tx.to_json_dict() for tx in unsigned_txs] + if request.get("CHIP-0029", False): + response["unsigned_transactions"] = [ + json_serialize_with_clvm_streamable( + tx, + translation_layer=( + ALL_TRANSLATION_LAYERS[request["translation"]] if "translation" in request else None + ), + ) + for tx in unsigned_txs + ] else: - response["unsigned_transactions"] = [byte_serialize_clvm_streamable(tx).hex() for tx in unsigned_txs] + response["unsigned_transactions"] = [tx.to_json_dict() for tx in unsigned_txs] new_txs: List[TransactionRecord] = [] if request.get("sign", self.service.config.get("auto_sign_txs", True)): new_txs, signing_responses = await self.service.wallet_state_manager.sign_transactions( tx_records, response.get("signing_responses", []), "signing_responses" in response ) - response["signing_responses"] = [byte_serialize_clvm_streamable(r).hex() for r in signing_responses] + if request.get("CHIP-0029", False): + response["signing_responses"] = [ + json_serialize_with_clvm_streamable( + sr, + translation_layer=( + ALL_TRANSLATION_LAYERS[request["translation"]] if "translation" in request else None + ), + ) + for sr in signing_responses + ] + else: + response["signing_responses"] = [sr.to_json_dict() for sr in signing_responses] else: new_txs = tx_records # pragma: no cover diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 62fc3afaa0fa..cae18fbc4047 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -1,13 +1,27 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional +from typing import Any, Dict, List, Optional, Type, TypeVar from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.spend_bundle import SpendBundle from chia.util.ints import uint32 from chia.util.streamable import Streamable, streamable from chia.wallet.notification_store import Notification -from chia.wallet.signer_protocol import SignedTransaction, SigningInstructions, SigningResponse, Spend +from chia.wallet.signer_protocol import ( + SignedTransaction, + SigningInstructions, + SigningResponse, + Spend, + UnsignedTransaction, +) +from chia.wallet.trade_record import TradeRecord +from chia.wallet.trading.offer import Offer +from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable +from chia.wallet.vc_wallet.vc_store import VCRecord + +_T_OfferEndpointResponse = TypeVar("_T_OfferEndpointResponse", bound="_OfferEndpointResponse") @streamable @@ -59,3 +73,237 @@ class SubmitTransactions(Streamable): @dataclass(frozen=True) class SubmitTransactionsResponse(Streamable): mempool_ids: List[bytes32] + + +@streamable +@dataclass(frozen=True) +class ExecuteSigningInstructions(Streamable): + signing_instructions: SigningInstructions + partial_allowed: bool = False + + +@streamable +@dataclass(frozen=True) +class ExecuteSigningInstructionsResponse(Streamable): + signing_responses: List[SigningResponse] + + +@streamable +@dataclass(frozen=True) +class TransactionEndpointResponse(Streamable): + unsigned_transactions: List[UnsignedTransaction] + transactions: 
List[TransactionRecord] + + +# TODO: The section below needs corresponding request types +# TODO: The section below should be added to the API (currently only for client) +@streamable +@dataclass(frozen=True) +class SendTransactionResponse(TransactionEndpointResponse): + transaction: TransactionRecord + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class SendTransactionMultiResponse(TransactionEndpointResponse): + transaction: TransactionRecord + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class CreateSignedTransactionsResponse(TransactionEndpointResponse): + signed_txs: List[TransactionRecord] + signed_tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DIDUpdateRecoveryIDsResponse(TransactionEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class DIDMessageSpendResponse(TransactionEndpointResponse): + spend_bundle: SpendBundle + + +@streamable +@dataclass(frozen=True) +class DIDUpdateMetadataResponse(TransactionEndpointResponse): + spend_bundle: SpendBundle + wallet_id: uint32 + + +@streamable +@dataclass(frozen=True) +class DIDTransferDIDResponse(TransactionEndpointResponse): + transaction: TransactionRecord + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class CATSpendResponse(TransactionEndpointResponse): + transaction: TransactionRecord + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class _OfferEndpointResponse(TransactionEndpointResponse): + offer: Offer + trade_record: TradeRecord + + @classmethod + def from_json_dict(cls: Type[_T_OfferEndpointResponse], json_dict: Dict[str, Any]) -> _T_OfferEndpointResponse: + tx_endpoint: TransactionEndpointResponse = json_deserialize_with_clvm_streamable( + json_dict, TransactionEndpointResponse + ) + offer: Offer = Offer.from_bech32(json_dict["offer"]) + + return cls( + **tx_endpoint.__dict__, + offer=offer, + trade_record=TradeRecord.from_json_dict_convenience(json_dict["trade_record"], bytes(offer).hex()), + ) + + +@streamable +@dataclass(frozen=True) +class CreateOfferForIDsResponse(_OfferEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class TakeOfferResponse(_OfferEndpointResponse): # Inheriting for de-dup sake + pass + + +@streamable +@dataclass(frozen=True) +class CancelOfferResponse(TransactionEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class CancelOffersResponse(TransactionEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class NFTMintNFTResponse(TransactionEndpointResponse): + wallet_id: uint32 + spend_bundle: SpendBundle + nft_id: str + + +@streamable +@dataclass(frozen=True) +class NFTAddURIResponse(TransactionEndpointResponse): + wallet_id: uint32 + spend_bundle: SpendBundle + + +@streamable +@dataclass(frozen=True) +class NFTTransferNFTResponse(TransactionEndpointResponse): + wallet_id: uint32 + spend_bundle: SpendBundle + + +@streamable +@dataclass(frozen=True) +class NFTSetNFTDIDResponse(TransactionEndpointResponse): + wallet_id: uint32 + spend_bundle: SpendBundle + + +@streamable +@dataclass(frozen=True) +class NFTMintBulkResponse(TransactionEndpointResponse): + spend_bundle: SpendBundle + nft_id_list: List[str] + + +@streamable +@dataclass(frozen=True) +class CreateNewDAOWalletResponse(TransactionEndpointResponse): + type: uint32 + wallet_id: uint32 + treasury_id: bytes32 + cat_wallet_id: uint32 + dao_cat_wallet_id: uint32 + + +@streamable +@dataclass(frozen=True) +class DAOCreateProposalResponse(TransactionEndpointResponse): + proposal_id: 
bytes32 + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DAOVoteOnProposalResponse(TransactionEndpointResponse): + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DAOCloseProposalResponse(TransactionEndpointResponse): + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DAOFreeCoinsFromFinishedProposalsResponse(TransactionEndpointResponse): + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DAOAddFundsToTreasuryResponse(TransactionEndpointResponse): + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class DAOSendToLockupResponse(TransactionEndpointResponse): + tx_id: bytes32 + txs: List[TransactionRecord] + + +@streamable +@dataclass(frozen=True) +class DAOExitLockupResponse(TransactionEndpointResponse): + tx_id: bytes32 + tx: TransactionRecord + + +@streamable +@dataclass(frozen=True) +class VCMintResponse(TransactionEndpointResponse): + vc_record: VCRecord + + +@streamable +@dataclass(frozen=True) +class VCSpendResponse(TransactionEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class VCRevokeResponse(TransactionEndpointResponse): + pass diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index fb2663347005..451712ef8d11 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -22,6 +22,8 @@ from chia.rpc.wallet_request_types import ( ApplySignatures, ApplySignaturesResponse, + ExecuteSigningInstructions, + ExecuteSigningInstructionsResponse, GatherSigningInfo, GatherSigningInfoResponse, GetNotifications, @@ -295,6 +297,8 @@ def get_routes(self) -> Dict[str, Endpoint]: "/gather_signing_info": self.gather_signing_info, "/apply_signatures": self.apply_signatures, "/submit_transactions": self.submit_transactions, + # Not technically Signer Protocol but related + "/execute_signing_instructions": self.execute_signing_instructions, } def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: @@ -383,7 +387,7 @@ async def log_in(self, request: Dict[str, Any]) -> EndpointResult: if started is True: return {"fingerprint": fingerprint} - return {"success": False, "error": "Unknown Error"} + return {"success": False, "error": f"fingerprint {fingerprint} not found in keychain or keychain is empty"} async def get_logged_in_fingerprint(self, request: Dict[str, Any]) -> EndpointResult: return {"fingerprint": self.service.logged_in_fingerprint} @@ -758,6 +762,9 @@ async def create_new_wallet( if type(request["metadata"]) is dict: metadata = request["metadata"] + if not push: + raise ValueError("Creation of DID wallet must be automatically pushed for now.") + async with self.service.wallet_state_manager.lock: did_wallet_name: str = request.get("wallet_name", None) if did_wallet_name is not None: @@ -1205,6 +1212,7 @@ async def send_transaction_multi(self, request: Dict[str, Any]) -> EndpointResul "transaction": transaction, "transaction_id": TransactionRecord.from_json_dict_convenience(transaction).name, "transactions": transactions, + "unsigned_transactions": response["unsigned_transactions"], } @tx_endpoint(push=True, merge_spends=False) @@ -4581,3 +4589,14 @@ async def submit_transactions( return SubmitTransactionsResponse( await self.service.wallet_state_manager.submit_transactions(request.signed_transactions) ) + + @marshal + async def execute_signing_instructions( + self, + request: ExecuteSigningInstructions, + ) -> 
ExecuteSigningInstructionsResponse: + return ExecuteSigningInstructionsResponse( + await self.service.wallet_state_manager.execute_signing_instructions( + request.signing_instructions, request.partial_allowed + ) + ) diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index a234fed136e3..d2b459948930 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -9,12 +9,42 @@ from chia.rpc.wallet_request_types import ( ApplySignatures, ApplySignaturesResponse, + CancelOfferResponse, + CancelOffersResponse, + CATSpendResponse, + CreateNewDAOWalletResponse, + CreateOfferForIDsResponse, + CreateSignedTransactionsResponse, + DAOAddFundsToTreasuryResponse, + DAOCloseProposalResponse, + DAOCreateProposalResponse, + DAOExitLockupResponse, + DAOFreeCoinsFromFinishedProposalsResponse, + DAOSendToLockupResponse, + DAOVoteOnProposalResponse, + DIDMessageSpendResponse, + DIDTransferDIDResponse, + DIDUpdateMetadataResponse, + DIDUpdateRecoveryIDsResponse, + ExecuteSigningInstructions, + ExecuteSigningInstructionsResponse, GatherSigningInfo, GatherSigningInfoResponse, GetNotifications, GetNotificationsResponse, + NFTAddURIResponse, + NFTMintBulkResponse, + NFTMintNFTResponse, + NFTSetNFTDIDResponse, + NFTTransferNFTResponse, + SendTransactionMultiResponse, + SendTransactionResponse, SubmitTransactions, SubmitTransactionsResponse, + TakeOfferResponse, + VCMintResponse, + VCRevokeResponse, + VCSpendResponse, ) from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -117,9 +147,10 @@ async def get_height_info(self) -> uint32: async def push_tx(self, spend_bundle: SpendBundle) -> Dict[str, Any]: return await self.fetch("push_tx", {"spend_bundle": bytes(spend_bundle).hex()}) - async def push_transactions(self, txs: List[TransactionRecord]) -> Dict[str, Any]: + async def push_transactions(self, txs: List[TransactionRecord], sign: bool = False) -> Dict[str, Any]: transactions = [bytes(tx).hex() for tx in txs] - return await self.fetch("push_transactions", {"transactions": transactions}) + + return await self.fetch("push_transactions", {"transactions": transactions, "sign": sign}) async def farm_block(self, address: str) -> Dict[str, Any]: return await self.fetch("farm_block", {"address": address}) @@ -221,7 +252,7 @@ async def send_transaction( extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> TransactionRecord: + ) -> SendTransactionResponse: request = { "wallet_id": wallet_id, "amount": amount, @@ -236,7 +267,7 @@ async def send_transaction( if memos is not None: request["memos"] = memos response = await self.fetch("send_transaction", request) - return TransactionRecord.from_json_dict_convenience(response["transaction"]) + return json_deserialize_with_clvm_streamable(response, SendTransactionResponse) async def send_transaction_multi( self, @@ -246,7 +277,7 @@ async def send_transaction_multi( coins: Optional[List[Coin]] = None, fee: uint64 = uint64(0), push: bool = True, - ) -> TransactionRecord: + ) -> SendTransactionMultiResponse: # Converts bytes to hex for puzzle hashes additions_hex = [] for ad in additions: @@ -264,13 +295,14 @@ async def send_transaction_multi( coins_json = [c.to_json_dict() for c in coins] request["coins"] = coins_json response = await self.fetch("send_transaction_multi", request) - return TransactionRecord.from_json_dict_convenience(response["transaction"]) + return json_deserialize_with_clvm_streamable(response, SendTransactionMultiResponse) async def spend_clawback_coins( self, coin_ids: List[bytes32], fee: int = 0, force: bool = False, + push: bool = True, extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> Dict[str, Any]: @@ -279,6 +311,7 @@ async def spend_clawback_coins( "fee": fee, "force": force, "extra_conditions": conditions_to_json_dicts(extra_conditions), + "push": push, **timelock_info.to_json_dict(), } response = await self.fetch("spend_clawback_coins", request) @@ -310,7 +343,7 @@ async def create_signed_transactions( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, - ) -> List[TransactionRecord]: + ) -> CreateSignedTransactionsResponse: # Converts bytes to hex for puzzle hashes additions_hex = [] for ad in additions: @@ -335,33 +368,7 @@ async def create_signed_transactions( request["wallet_id"] = wallet_id response = await self.fetch("create_signed_transaction", request) - return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["signed_txs"]] - - async def create_signed_transaction( - self, - additions: List[Dict[str, Any]], - tx_config: TXConfig, - coins: Optional[List[Coin]] = None, - fee: uint64 = uint64(0), - wallet_id: Optional[int] = None, - push: bool = False, - extra_conditions: Tuple[Condition, ...] = tuple(), - timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> TransactionRecord: - txs: List[TransactionRecord] = await self.create_signed_transactions( - additions=additions, - tx_config=tx_config, - coins=coins, - fee=fee, - wallet_id=wallet_id, - push=push, - extra_conditions=extra_conditions, - timelock_info=timelock_info, - ) - if len(txs) == 0: - raise ValueError("`create_signed_transaction` returned empty list!") - - return txs[0] + return json_deserialize_with_clvm_streamable(response, CreateSignedTransactionsResponse) async def select_coins(self, amount: int, wallet_id: int, coin_selection_config: CoinSelectionConfig) -> List[Coin]: request = {"amount": amount, "wallet_id": wallet_id, **coin_selection_config.to_json_dict()} @@ -410,6 +417,7 @@ async def create_new_did_wallet( name: Optional[str] = "DID Wallet", backup_ids: List[str] = [], required_num: int = 0, + push: bool = True, extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> Dict[str, Any]: @@ -421,6 +429,7 @@ async def create_new_did_wallet( "amount": amount, "fee": fee, "wallet_name": name, + "push": push, "extra_conditions": conditions_to_json_dicts(extra_conditions), **tx_config.to_json_dict(), **timelock_info.to_json_dict(), @@ -452,7 +461,7 @@ async def update_did_recovery_list( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> DIDUpdateRecoveryIDsResponse: request = { "wallet_id": wallet_id, "new_list": recovery_list, @@ -463,7 +472,7 @@ async def update_did_recovery_list( **timelock_info.to_json_dict(), } response = await self.fetch("did_update_recovery_ids", request) - return response + return json_deserialize_with_clvm_streamable(response, DIDUpdateRecoveryIDsResponse) async def get_did_recovery_list(self, wallet_id: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -477,7 +486,7 @@ async def did_message_spend( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, - ) -> Dict[str, Any]: + ) -> DIDMessageSpendResponse: request = { "wallet_id": wallet_id, "extra_conditions": conditions_to_json_dicts(extra_conditions), @@ -486,7 +495,7 @@ async def did_message_spend( **timelock_info.to_json_dict(), } response = await self.fetch("did_message_spend", request) - return response + return json_deserialize_with_clvm_streamable(response, DIDMessageSpendResponse) async def update_did_metadata( self, @@ -496,7 +505,7 @@ async def update_did_metadata( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> DIDUpdateMetadataResponse: request = { "wallet_id": wallet_id, "metadata": metadata, @@ -506,7 +515,7 @@ async def update_did_metadata( **timelock_info.to_json_dict(), } response = await self.fetch("did_update_metadata", request) - return response + return json_deserialize_with_clvm_streamable(response, DIDUpdateMetadataResponse) async def get_did_metadata(self, wallet_id: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -572,7 +581,7 @@ async def did_transfer_did( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> DIDTransferDIDResponse: request = { "wallet_id": wallet_id, "inner_address": address, @@ -584,7 +593,7 @@ async def did_transfer_did( **timelock_info.to_json_dict(), } response = await self.fetch("did_transfer_did", request) - return response + return json_deserialize_with_clvm_streamable(response, DIDTransferDIDResponse) async def did_set_wallet_name(self, wallet_id: int, name: str) -> Dict[str, Any]: request = {"wallet_id": wallet_id, "name": name} @@ -724,8 +733,8 @@ async def cat_spend( extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> TransactionRecord: - send_dict = { + ) -> CATSpendResponse: + send_dict: Dict[str, Any] = { "wallet_id": wallet_id, "fee": fee, "memos": memos if memos is not None else [], @@ -753,7 +762,7 @@ async def cat_spend( send_dict["tail_reveal"] = bytes(cat_discrepancy[1]).hex() send_dict["tail_solution"] = bytes(cat_discrepancy[2]).hex() res = await self.fetch("cat_spend", send_dict) - return TransactionRecord.from_json_dict_convenience(res["transaction"]) + return json_deserialize_with_clvm_streamable(res, CATSpendResponse) # Offers async def create_offer_for_ids( @@ -766,7 +775,7 @@ async def create_offer_for_ids( validate_only: bool = False, extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Tuple[Optional[Offer], TradeRecord]: + ) -> CreateOfferForIDsResponse: send_dict: Dict[str, int] = {str(key): value for key, value in offer_dict.items()} req = { @@ -782,9 +791,7 @@ async def create_offer_for_ids( if solver is not None: req["solver"] = solver res = await self.fetch("create_offer_for_ids", req) - offer: Optional[Offer] = None if validate_only else Offer.from_bech32(res["offer"]) - offer_str: str = "" if offer is None else bytes(offer).hex() - return offer, TradeRecord.from_json_dict_convenience(res["trade_record"], offer_str) + return json_deserialize_with_clvm_streamable(res, CreateOfferForIDsResponse) async def get_offer_summary( self, offer: Offer, advanced: bool = False @@ -805,7 +812,7 @@ async def take_offer( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> TradeRecord: + ) -> TakeOfferResponse: req = { "offer": offer.to_bech32(), "fee": fee, @@ -817,7 +824,7 @@ async def take_offer( if solver is not None: req["solver"] = solver res = await self.fetch("take_offer", req) - return TradeRecord.from_json_dict_convenience(res["trade_record"]) + return json_deserialize_with_clvm_streamable(res, TakeOfferResponse) async def get_offer(self, trade_id: bytes32, file_contents: bool = False) -> TradeRecord: res = await self.fetch("get_offer", {"trade_id": trade_id.hex(), "file_contents": file_contents}) @@ -868,8 +875,8 @@ async def cancel_offer( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> None: - await self.fetch( + ) -> CancelOfferResponse: + res = await self.fetch( "cancel_offer", { "trade_id": trade_id.hex(), @@ -882,6 +889,8 @@ async def cancel_offer( }, ) + return json_deserialize_with_clvm_streamable(res, CancelOfferResponse) + async def cancel_offers( self, tx_config: TXConfig, @@ -893,8 +902,8 @@ async def cancel_offers( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> None: - await self.fetch( + ) -> CancelOffersResponse: + res = await self.fetch( "cancel_offers", { "secure": secure, @@ -910,6 +919,8 @@ async def cancel_offers( }, ) + return json_deserialize_with_clvm_streamable(res, CancelOffersResponse) + # NFT wallet async def create_new_nft_wallet(self, did_id: Optional[str], name: Optional[str] = None) -> Dict[str, Any]: request = {"wallet_type": "nft_wallet", "did_id": did_id, "name": name} @@ -936,7 +947,7 @@ async def mint_nft( extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> NFTMintNFTResponse: request = { "wallet_id": wallet_id, "royalty_address": royalty_address, @@ -958,8 +969,9 @@ async def mint_nft( **timelock_info.to_json_dict(), } response = await self.fetch("nft_mint_nft", request) - return response + return json_deserialize_with_clvm_streamable(response, NFTMintNFTResponse) + # TODO: add a test for this async def add_uri_to_nft( self, wallet_id: int, @@ -971,7 +983,7 @@ async def add_uri_to_nft( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> NFTAddURIResponse: # pragma: no cover request = { "wallet_id": wallet_id, "nft_coin_id": nft_coin_id, @@ -984,7 +996,7 @@ async def add_uri_to_nft( **timelock_info.to_json_dict(), } response = await self.fetch("nft_add_uri", request) - return response + return json_deserialize_with_clvm_streamable(response, NFTAddURIResponse) async def nft_calculate_royalties( self, @@ -1017,7 +1029,7 @@ async def transfer_nft( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> NFTTransferNFTResponse: request = { "wallet_id": wallet_id, "nft_coin_id": nft_coin_id, @@ -1029,7 +1041,7 @@ async def transfer_nft( **timelock_info.to_json_dict(), } response = await self.fetch("nft_transfer_nft", request) - return response + return json_deserialize_with_clvm_streamable(response, NFTTransferNFTResponse) async def count_nfts(self, wallet_id: Optional[int]) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -1041,6 +1053,7 @@ async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) - response = await self.fetch("nft_get_nfts", request) return response + # TODO: add a test for this async def set_nft_did( self, wallet_id: int, @@ -1051,7 +1064,7 @@ async def set_nft_did( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Dict[str, Any]: + ) -> NFTSetNFTDIDResponse: # pragma: no cover request = { "wallet_id": wallet_id, "did_id": did_id, @@ -1063,7 +1076,7 @@ async def set_nft_did( **timelock_info.to_json_dict(), } response = await self.fetch("nft_set_nft_did", request) - return response + return json_deserialize_with_clvm_streamable(response, NFTSetNFTDIDResponse) async def get_nft_wallet_did(self, wallet_id: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -1090,7 +1103,7 @@ async def nft_mint_bulk( extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, - ) -> Dict[str, Any]: + ) -> NFTMintBulkResponse: request = { "wallet_id": wallet_id, "metadata_list": metadata_list, @@ -1112,7 +1125,7 @@ async def nft_mint_bulk( **timelock_info.to_json_dict(), } response = await self.fetch("nft_mint_bulk", request) - return response + return json_deserialize_with_clvm_streamable(response, NFTMintBulkResponse) # DataLayer async def create_new_dl( @@ -1257,10 +1270,11 @@ async def dl_delete_mirror( async def dl_verify_proof(self, request: DLProof) -> VerifyProofResponse: response = await self.fetch(path="dl_verify_proof", request_json=request.to_json_dict()) - return VerifyProofResponse.from_json_dict(response) + return json_deserialize_with_clvm_streamable(response, VerifyProofResponse) async def get_notifications(self, request: GetNotifications) -> GetNotificationsResponse: - return GetNotificationsResponse.from_json_dict(await self.fetch("get_notifications", request.to_json_dict())) + response = await self.fetch("get_notifications", request.to_json_dict()) + return json_deserialize_with_clvm_streamable(response, GetNotificationsResponse) async def delete_notifications(self, ids: Optional[Sequence[bytes32]] = None) -> bool: request = {} @@ -1279,6 +1293,7 @@ async def send_notification( fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), + push: bool = True, ) -> TransactionRecord: response = await self.fetch( "send_notification", @@ -1288,6 +1303,7 @@ async def send_notification( "amount": amount, "fee": fee, "extra_conditions": conditions_to_json_dicts(extra_conditions), + "push": push, **timelock_info.to_json_dict(), }, ) @@ -1314,11 +1330,12 @@ async def create_new_dao_wallet( fee: uint64 = uint64(0), fee_for_cat: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> CreateNewDAOWalletResponse: + request: Dict[str, Any] = { "wallet_type": "dao_wallet", "mode": mode, - "treasury_id": treasury_id, + "treasury_id": treasury_id.hex() if treasury_id is not None else treasury_id, "dao_rules": dao_rules, "amount_of_cats": amount_of_cats, "filter_amount": filter_amount, @@ -1326,10 +1343,11 @@ async def create_new_dao_wallet( "fee": fee, "fee_for_cat": fee_for_cat, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("create_new_wallet", request) - return response + return json_deserialize_with_clvm_streamable(response, CreateNewDAOWalletResponse) async def dao_get_treasury_id(self, wallet_id: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -1355,8 +1373,9 @@ async def dao_create_proposal( new_dao_rules: Optional[Dict[str, Optional[uint64]]] = None, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] 
= tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOCreateProposalResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "proposal_type": proposal_type, "additions": additions, @@ -1372,7 +1391,7 @@ async def dao_create_proposal( } response = await self.fetch("dao_create_proposal", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOCreateProposalResponse) async def dao_get_proposal_state(self, wallet_id: int, proposal_id: str) -> Dict[str, Any]: request = {"wallet_id": wallet_id, "proposal_id": proposal_id} @@ -1393,18 +1412,20 @@ async def dao_vote_on_proposal( is_yes_vote: bool = True, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOVoteOnProposalResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "proposal_id": proposal_id, "vote_amount": vote_amount, "is_yes_vote": is_yes_vote, "fee": fee, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("dao_vote_on_proposal", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOVoteOnProposalResponse) async def dao_get_proposals(self, wallet_id: int, include_closed: bool = True) -> Dict[str, Any]: request = {"wallet_id": wallet_id, "include_closed": include_closed} @@ -1419,17 +1440,19 @@ async def dao_close_proposal( self_destruct: Optional[bool] = None, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOCloseProposalResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "proposal_id": proposal_id, "self_destruct": self_destruct, "fee": fee, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("dao_close_proposal", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOCloseProposalResponse) async def dao_free_coins_from_finished_proposals( self, @@ -1437,15 +1460,16 @@ async def dao_free_coins_from_finished_proposals( tx_config: TXConfig, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOFreeCoinsFromFinishedProposalsResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "fee": fee, "extra_conditions": list(extra_conditions), **tx_config.to_json_dict(), } response = await self.fetch("dao_free_coins_from_finished_proposals", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOFreeCoinsFromFinishedProposalsResponse) async def dao_get_treasury_balance(self, wallet_id: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id} @@ -1460,17 +1484,19 @@ async def dao_add_funds_to_treasury( tx_config: TXConfig, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] 
= tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOAddFundsToTreasuryResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "funding_wallet_id": funding_wallet_id, "amount": amount, "fee": fee, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("dao_add_funds_to_treasury", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOAddFundsToTreasuryResponse) async def dao_send_to_lockup( self, @@ -1479,16 +1505,18 @@ async def dao_send_to_lockup( tx_config: TXConfig, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOSendToLockupResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "amount": amount, "fee": fee, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("dao_send_to_lockup", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOSendToLockupResponse) async def dao_exit_lockup( self, @@ -1497,16 +1525,18 @@ async def dao_exit_lockup( coins: Optional[List[Dict[str, Any]]] = None, fee: uint64 = uint64(0), extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Dict[str, Any]: - request = { + push: bool = True, + ) -> DAOExitLockupResponse: + request: Dict[str, Any] = { "wallet_id": wallet_id, "coins": coins, "fee": fee, "extra_conditions": list(extra_conditions), + "push": push, **tx_config.to_json_dict(), } response = await self.fetch("dao_exit_lockup", request) - return response + return json_deserialize_with_clvm_streamable(response, DAOExitLockupResponse) async def dao_adjust_filter_level(self, wallet_id: int, filter_level: int) -> Dict[str, Any]: request = {"wallet_id": wallet_id, "filter_level": filter_level} @@ -1522,7 +1552,7 @@ async def vc_mint( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> Tuple[VCRecord, List[TransactionRecord]]: + ) -> VCMintResponse: response = await self.fetch( "vc_mint", { @@ -1535,9 +1565,7 @@ async def vc_mint( **timelock_info.to_json_dict(), }, ) - return VCRecord.from_json_dict(response["vc_record"]), [ - TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"] - ] + return json_deserialize_with_clvm_streamable(response, VCMintResponse) async def vc_get(self, vc_id: bytes32) -> Optional[VCRecord]: response = await self.fetch("vc_get", {"vc_id": vc_id.hex()}) @@ -1558,7 +1586,7 @@ async def vc_spend( extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> List[TransactionRecord]: + ) -> VCSpendResponse: response = await self.fetch( "vc_spend", { @@ -1575,7 +1603,7 @@ async def vc_spend( **timelock_info.to_json_dict(), }, ) - return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]] + return json_deserialize_with_clvm_streamable(response, VCSpendResponse) async def vc_add_proofs(self, proofs: Dict[str, Any]) -> None: await self.fetch("vc_add_proofs", {"proofs": proofs}) @@ -1592,7 +1620,7 @@ async def vc_revoke( extra_conditions: Tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, - ) -> List[TransactionRecord]: + ) -> VCRevokeResponse: response = await self.fetch( "vc_revoke", { @@ -1604,7 +1632,7 @@ async def vc_revoke( **timelock_info.to_json_dict(), }, ) - return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]] + return json_deserialize_with_clvm_streamable(response, VCRevokeResponse) async def crcat_approve_pending( self, @@ -1661,3 +1689,11 @@ async def submit_transactions( ), SubmitTransactionsResponse, ) + + async def execute_signing_instructions( + self, + args: ExecuteSigningInstructions, + ) -> ExecuteSigningInstructionsResponse: + return ExecuteSigningInstructionsResponse.from_json_dict( + await self.fetch("execute_signing_instructions", args.to_json_dict()) + ) diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 6128e6e866c6..8b1d255317bf 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -272,7 +272,9 @@ def function_to_post_init_process_one_item(f_type: Type[object]) -> ConvertFunct return lambda item: post_init_process_item(f_type, item) -def recurse_jsonify(d: Any, next_recursion_step: Optional[Callable[[Any, Any], Any]] = None) -> Any: +def recurse_jsonify( + d: Any, next_recursion_step: Optional[Callable[[Any, Any], Any]] = None, **next_recursion_env: Any +) -> Any: """ Makes bytes objects into strings with 0x, and makes large ints into strings. """ @@ -281,19 +283,19 @@ def recurse_jsonify(d: Any, next_recursion_step: Optional[Callable[[Any, Any], A if dataclasses.is_dataclass(d): new_dict = {} for field in dataclasses.fields(d): - new_dict[field.name] = next_recursion_step(getattr(d, field.name), None) + new_dict[field.name] = next_recursion_step(getattr(d, field.name), None, **next_recursion_env) return new_dict elif isinstance(d, (list, tuple)): new_list = [] for item in d: - new_list.append(next_recursion_step(item, None)) + new_list.append(next_recursion_step(item, None, **next_recursion_env)) return new_list elif isinstance(d, dict): new_dict = {} for name, val in d.items(): - new_dict[name] = next_recursion_step(val, None) + new_dict[name] = next_recursion_step(val, None, **next_recursion_env) return new_dict elif issubclass(type(d), bytes): diff --git a/chia/wallet/signer_protocol.py b/chia/wallet/signer_protocol.py index 302dc0706f33..bd6800735359 100644 --- a/chia/wallet/signer_protocol.py +++ b/chia/wallet/signer_protocol.py @@ -13,7 +13,7 @@ from chia.wallet.util.clvm_streamable import clvm_streamable # This file contains the base types for communication between a wallet and an offline transaction signer. 
-# These types should be compliant with CHIP-TBD +# These types should be compliant with CHIP-0028 @clvm_streamable diff --git a/chia/wallet/transaction_record.py b/chia/wallet/transaction_record.py index 6e039dbd67b1..c093383caf0f 100644 --- a/chia/wallet/transaction_record.py +++ b/chia/wallet/transaction_record.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict, Generic, List, Optional, Tuple, Type, TypeVar +from typing import Any, Dict, Generic, List, Optional, Tuple, Type, TypeVar from chia.consensus.coinbase import farmer_parent_id, pool_parent_id from chia.types.blockchain_format.coin import Coin @@ -101,6 +101,13 @@ def from_json_dict_convenience(cls: Type[_T_TransactionRecord], modified_tx_inpu modified_tx["memos"] = memos_list return cls.from_json_dict(modified_tx) + @classmethod + def from_json_dict(cls: Type[_T_TransactionRecord], json_dict: Dict[str, Any]) -> _T_TransactionRecord: + try: + return super().from_json_dict(json_dict) + except Exception: + return cls.from_json_dict_convenience(json_dict) + def to_json_dict_convenience(self, config: Dict) -> Dict: selected = config["selected_network"] prefix = config["network_overrides"]["config"][selected]["address_prefix"] diff --git a/chia/wallet/util/blind_signer_tl.py b/chia/wallet/util/blind_signer_tl.py new file mode 100644 index 000000000000..d5e8abb8cf50 --- /dev/null +++ b/chia/wallet/util/blind_signer_tl.py @@ -0,0 +1,168 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import List + +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint64 +from chia.util.streamable import Streamable +from chia.wallet.signer_protocol import ( + KeyHints, + PathHint, + SigningInstructions, + SigningResponse, + SigningTarget, + SumHint, + TransactionInfo, + UnsignedTransaction, +) +from chia.wallet.util.clvm_streamable import TranslationLayer, TranslationLayerMapping, clvm_streamable + +# Pylint doesn't understand that these classes are in fact dataclasses +# pylint: disable=invalid-field-call + + +@clvm_streamable +@dataclass(frozen=True) +class BSTLSigningTarget(Streamable): + fingerprint: bytes = field(metadata=dict(key="f")) + message: bytes = field(metadata=dict(key="m")) + hook: bytes32 = field(metadata=dict(key="h")) + + @staticmethod + def from_wallet_api(_from: SigningTarget) -> BSTLSigningTarget: + return BSTLSigningTarget(**_from.__dict__) + + @staticmethod + def to_wallet_api(_from: BSTLSigningTarget) -> SigningTarget: + return SigningTarget(**_from.__dict__) + + +@clvm_streamable +@dataclass(frozen=True) +class BSTLSumHint(Streamable): + fingerprints: List[bytes] = field(metadata=dict(key="f")) + synthetic_offset: bytes = field(metadata=dict(key="o")) + final_pubkey: bytes = field(metadata=dict(key="p")) + + @staticmethod + def from_wallet_api(_from: SumHint) -> BSTLSumHint: + return BSTLSumHint(**_from.__dict__) + + @staticmethod + def to_wallet_api(_from: BSTLSumHint) -> SumHint: + return SumHint(**_from.__dict__) + + +@clvm_streamable +@dataclass(frozen=True) +class BSTLPathHint(Streamable): + root_fingerprint: bytes = field(metadata=dict(key="f")) + path: List[uint64] = field(metadata=dict(key="p")) + + @staticmethod + def from_wallet_api(_from: PathHint) -> BSTLPathHint: + return BSTLPathHint(**_from.__dict__) + + @staticmethod + def to_wallet_api(_from: BSTLPathHint) -> PathHint: + return PathHint(**_from.__dict__) + + +@clvm_streamable +@dataclass(frozen=True) +class 
BSTLSigningInstructions(Streamable): + sum_hints: List[BSTLSumHint] = field(metadata=dict(key="s")) + path_hints: List[BSTLPathHint] = field(metadata=dict(key="p")) + targets: List[BSTLSigningTarget] = field(metadata=dict(key="t")) + + @staticmethod + def from_wallet_api(_from: SigningInstructions) -> BSTLSigningInstructions: + return BSTLSigningInstructions( + [BSTLSumHint.from_wallet_api(sum_hint) for sum_hint in _from.key_hints.sum_hints], + [BSTLPathHint.from_wallet_api(path_hint) for path_hint in _from.key_hints.path_hints], + [BSTLSigningTarget.from_wallet_api(signing_target) for signing_target in _from.targets], + ) + + @staticmethod + def to_wallet_api(_from: BSTLSigningInstructions) -> SigningInstructions: + return SigningInstructions( + KeyHints( + [BSTLSumHint.to_wallet_api(sum_hint) for sum_hint in _from.sum_hints], + [BSTLPathHint.to_wallet_api(path_hint) for path_hint in _from.path_hints], + ), + [BSTLSigningTarget.to_wallet_api(signing_target) for signing_target in _from.targets], + ) + + +@clvm_streamable +@dataclass(frozen=True) +class BSTLUnsignedTransaction(Streamable): + sum_hints: List[BSTLSumHint] = field(metadata=dict(key="s")) + path_hints: List[BSTLPathHint] = field(metadata=dict(key="p")) + targets: List[BSTLSigningTarget] = field(metadata=dict(key="t")) + + @staticmethod + def from_wallet_api(_from: UnsignedTransaction) -> BSTLUnsignedTransaction: + return BSTLUnsignedTransaction( + [BSTLSumHint.from_wallet_api(sum_hint) for sum_hint in _from.signing_instructions.key_hints.sum_hints], + [BSTLPathHint.from_wallet_api(path_hint) for path_hint in _from.signing_instructions.key_hints.path_hints], + [ + BSTLSigningTarget.from_wallet_api(signing_target) + for signing_target in _from.signing_instructions.targets + ], + ) + + @staticmethod + def to_wallet_api(_from: BSTLUnsignedTransaction) -> UnsignedTransaction: + return UnsignedTransaction( + TransactionInfo([]), + SigningInstructions( + KeyHints( + [BSTLSumHint.to_wallet_api(sum_hint) for sum_hint in _from.sum_hints], + [BSTLPathHint.to_wallet_api(path_hint) for path_hint in _from.path_hints], + ), + [BSTLSigningTarget.to_wallet_api(signing_target) for signing_target in _from.targets], + ), + ) + + +@clvm_streamable +@dataclass(frozen=True) +class BSTLSigningResponse(Streamable): + signature: bytes = field(metadata=dict(key="s")) + hook: bytes32 = field(metadata=dict(key="h")) + + @staticmethod + def from_wallet_api(_from: SigningResponse) -> BSTLSigningResponse: + return BSTLSigningResponse(**_from.__dict__) + + @staticmethod + def to_wallet_api(_from: BSTLSigningResponse) -> SigningResponse: + return SigningResponse(**_from.__dict__) + + +BLIND_SIGNER_TRANSLATION = TranslationLayer( + [ + TranslationLayerMapping( + SigningTarget, BSTLSigningTarget, BSTLSigningTarget.from_wallet_api, BSTLSigningTarget.to_wallet_api + ), + TranslationLayerMapping(SumHint, BSTLSumHint, BSTLSumHint.from_wallet_api, BSTLSumHint.to_wallet_api), + TranslationLayerMapping(PathHint, BSTLPathHint, BSTLPathHint.from_wallet_api, BSTLPathHint.to_wallet_api), + TranslationLayerMapping( + SigningInstructions, + BSTLSigningInstructions, + BSTLSigningInstructions.from_wallet_api, + BSTLSigningInstructions.to_wallet_api, + ), + TranslationLayerMapping( + SigningResponse, BSTLSigningResponse, BSTLSigningResponse.from_wallet_api, BSTLSigningResponse.to_wallet_api + ), + TranslationLayerMapping( + UnsignedTransaction, + BSTLUnsignedTransaction, + BSTLUnsignedTransaction.from_wallet_api, + BSTLUnsignedTransaction.to_wallet_api, + ), + ] +) diff --git 
a/chia/wallet/util/clvm_streamable.py b/chia/wallet/util/clvm_streamable.py index ac98522ec6b3..66a8233a5c5d 100644 --- a/chia/wallet/util/clvm_streamable.py +++ b/chia/wallet/util/clvm_streamable.py @@ -2,7 +2,7 @@ import dataclasses import functools -from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union, get_args, get_type_hints +from typing import Any, Callable, Dict, Generic, List, Optional, Type, TypeVar, Union, get_args, get_type_hints from hsms.clvm_serde import from_program_for_type, to_program_for_type @@ -33,34 +33,62 @@ def clvm_streamable(cls: Type[Streamable]) -> Type[Streamable]: return wrapped_cls -def program_serialize_clvm_streamable(clvm_streamable: Streamable) -> Program: +def program_serialize_clvm_streamable( + clvm_streamable: Streamable, translation_layer: Optional[TranslationLayer] = None +) -> Program: + if translation_layer is not None: + mapping = translation_layer.get_mapping(clvm_streamable.__class__) + if mapping is not None: + clvm_streamable = translation_layer.serialize_for_translation(clvm_streamable, mapping) # Underlying hinting problem with clvm_serde return to_program_for_type(type(clvm_streamable))(clvm_streamable) # type: ignore[no-any-return] -def byte_serialize_clvm_streamable(clvm_streamable: Streamable) -> bytes: - return bytes(program_serialize_clvm_streamable(clvm_streamable)) +def byte_serialize_clvm_streamable( + clvm_streamable: Streamable, translation_layer: Optional[TranslationLayer] = None +) -> bytes: + return bytes(program_serialize_clvm_streamable(clvm_streamable, translation_layer=translation_layer)) def json_serialize_with_clvm_streamable( - streamable: Any, next_recursion_step: Optional[Callable[[Any, Any], Dict[str, Any]]] = None + streamable: Any, + next_recursion_step: Optional[Callable[..., Dict[str, Any]]] = None, + translation_layer: Optional[TranslationLayer] = None, + **next_recursion_env: Any, ) -> Union[str, Dict[str, Any]]: if next_recursion_step is None: next_recursion_step = recurse_jsonify if hasattr(streamable, "_clvm_streamable"): # If we are using clvm_serde, we stop JSON serialization at this point and instead return the clvm blob - return byte_serialize_clvm_streamable(streamable).hex() + return byte_serialize_clvm_streamable(streamable, translation_layer=translation_layer).hex() else: - return next_recursion_step(streamable, json_serialize_with_clvm_streamable) + return next_recursion_step( + streamable, json_serialize_with_clvm_streamable, translation_layer=translation_layer, **next_recursion_env + ) -def program_deserialize_clvm_streamable(program: Program, clvm_streamable_type: Type[_T_Streamable]) -> _T_Streamable: - # Underlying hinting problem with clvm_serde - return from_program_for_type(clvm_streamable_type)(program) # type: ignore[no-any-return] +def program_deserialize_clvm_streamable( + program: Program, clvm_streamable_type: Type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None +) -> _T_Streamable: + type_to_deserialize_from: Type[Streamable] = clvm_streamable_type + if translation_layer is not None: + mapping = translation_layer.get_mapping(clvm_streamable_type) + if mapping is not None: + type_to_deserialize_from = mapping.to_type + as_instance = from_program_for_type(type_to_deserialize_from)(program) + if translation_layer is not None and mapping is not None: + return translation_layer.deserialize_from_translation(as_instance, mapping) + else: + # Underlying hinting problem with clvm_serde + return as_instance # type: ignore[no-any-return] -def 
byte_deserialize_clvm_streamable(blob: bytes, clvm_streamable_type: Type[_T_Streamable]) -> _T_Streamable: - return program_deserialize_clvm_streamable(Program.from_bytes(blob), clvm_streamable_type) +def byte_deserialize_clvm_streamable( + blob: bytes, clvm_streamable_type: Type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None +) -> _T_Streamable: + return program_deserialize_clvm_streamable( + Program.from_bytes(blob), clvm_streamable_type, translation_layer=translation_layer + ) def is_compound_type(typ: Any) -> bool: @@ -68,10 +96,14 @@ def is_compound_type(typ: Any) -> bool: def json_deserialize_with_clvm_streamable( - json_dict: Union[str, Dict[str, Any]], streamable_type: Type[_T_Streamable] + json_dict: Union[str, Dict[str, Any]], + streamable_type: Type[_T_Streamable], + translation_layer: Optional[TranslationLayer] = None, ) -> _T_Streamable: if isinstance(json_dict, str): - return byte_deserialize_clvm_streamable(bytes.fromhex(json_dict), streamable_type) + return byte_deserialize_clvm_streamable( + bytes.fromhex(json_dict), streamable_type, translation_layer=translation_layer + ) else: old_streamable_fields = streamable_type.streamable_fields() new_streamable_fields = [] @@ -84,7 +116,11 @@ def json_deserialize_with_clvm_streamable( old_field, convert_function=function_to_convert_one_item( old_field.type, - functools.partial(json_deserialize_with_clvm_streamable, streamable_type=inner_type), + functools.partial( + json_deserialize_with_clvm_streamable, + streamable_type=inner_type, + translation_layer=translation_layer, + ), ), ) ) @@ -95,7 +131,9 @@ def json_deserialize_with_clvm_streamable( dataclasses.replace( old_field, convert_function=functools.partial( - json_deserialize_with_clvm_streamable, streamable_type=old_field.type + json_deserialize_with_clvm_streamable, + streamable_type=old_field.type, + translation_layer=translation_layer, ), ) ) @@ -104,3 +142,47 @@ def json_deserialize_with_clvm_streamable( setattr(streamable_type, "_streamable_fields", tuple(new_streamable_fields)) return streamable_type.from_json_dict(json_dict) + + +_T_ClvmStreamable = TypeVar("_T_ClvmStreamable", bound="Streamable") +_T_TLClvmStreamable = TypeVar("_T_TLClvmStreamable", bound="Streamable") + + +@dataclasses.dataclass(frozen=True) +class TranslationLayerMapping(Generic[_T_ClvmStreamable, _T_TLClvmStreamable]): + from_type: Type[_T_ClvmStreamable] + to_type: Type[_T_TLClvmStreamable] + serialize_function: Callable[[_T_ClvmStreamable], _T_TLClvmStreamable] + deserialize_function: Callable[[_T_TLClvmStreamable], _T_ClvmStreamable] + + +@dataclasses.dataclass(frozen=True) +class TranslationLayer: + type_mappings: List[TranslationLayerMapping[Any, Any]] + + def get_mapping( + self, _type: Type[_T_ClvmStreamable] + ) -> Optional[TranslationLayerMapping[_T_ClvmStreamable, Streamable]]: + mappings = [m for m in self.type_mappings if m.from_type == _type] + if len(mappings) == 1: + return mappings[0] + elif len(mappings) == 0: + return None + else: # pragma: no cover + raise RuntimeError("Malformed TranslationLayer") + + def serialize_for_translation( + self, instance: _T_ClvmStreamable, mapping: TranslationLayerMapping[_T_ClvmStreamable, _T_TLClvmStreamable] + ) -> _T_TLClvmStreamable: + if mapping is None: + return instance + else: + return mapping.serialize_function(instance) + + def deserialize_from_translation( + self, instance: _T_TLClvmStreamable, mapping: TranslationLayerMapping[_T_ClvmStreamable, _T_TLClvmStreamable] + ) -> _T_ClvmStreamable: + if mapping is None: + 
return instance + else: + return mapping.deserialize_function(instance) diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py index a3342080c179..e91b9fb68780 100644 --- a/chia/wallet/wallet.py +++ b/chia/wallet/wallet.py @@ -169,8 +169,8 @@ async def convert_puzzle_hash(self, puzzle_hash: bytes32) -> bytes32: return puzzle_hash # Looks unimpressive, but it's more complicated in other wallets async def puzzle_for_puzzle_hash(self, puzzle_hash: bytes32) -> Program: - secret_key = await self.wallet_state_manager.get_private_key(puzzle_hash) - return puzzle_for_pk(secret_key.get_g1()) + public_key = await self.wallet_state_manager.get_public_key(puzzle_hash) + return puzzle_for_pk(G1Element.from_bytes(public_key)) async def get_new_puzzle(self) -> Program: dr = await self.wallet_state_manager.get_unused_derivation_record(self.id()) diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index 0b2c514073d1..b837f78c1b14 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -376,6 +376,16 @@ async def get_private_key(self, puzzle_hash: bytes32) -> PrivateKey: return master_sk_to_wallet_sk(self.get_master_private_key(), record.index) return master_sk_to_wallet_sk_unhardened(self.get_master_private_key(), record.index) + async def get_public_key(self, puzzle_hash: bytes32) -> bytes: + record = await self.puzzle_store.record_for_puzzle_hash(puzzle_hash) + if record is None: + raise ValueError(f"No key for puzzle hash: {puzzle_hash.hex()}") + if isinstance(record._pubkey, bytes): + pk_bytes = record._pubkey + else: + pk_bytes = bytes(record._pubkey) + return pk_bytes + def get_master_private_key(self) -> PrivateKey: if self.private_key is None: # pragma: no cover raise ValueError("Wallet is currently in observer mode and access to private key is denied") From 9e1a4dceb724d7ebf56e3d8a75e8692184997805 Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Wed, 26 Jun 2024 19:22:15 +0300 Subject: [PATCH 29/77] CHIA-732 Optimize min height leaf. (#18123) * Optimize min height leaf. * Lint. * Lint. * Add tests. * Lint. * Lint. * Parametrize test. * Lint. * Update chia/data_layer/data_store.py Co-authored-by: Kyle Altendorf * Address review comments. * Report all hashes. 
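For context, a simplified sketch of the batching idea this patch applies (hypothetical in-memory lookup standing in for the real `get_nodes` bulk query; the real code caps the batch size to stay under SQLite's bound-parameter limit): the breadth-first search pops a whole slice of the queue at once, so locating the shallowest leaf costs one query per batch instead of one query per node.

from typing import Dict, List, Optional, Tuple

# (left_hash, right_hash) for internal nodes; None marks a leaf
NodeRow = Optional[Tuple[bytes, bytes]]


def leaf_at_minimum_height(root_hash: bytes, table: Dict[bytes, NodeRow], batch_size: int = 500) -> bytes:
    queue: List[bytes] = [root_hash]
    while queue:
        batch, queue = queue[:batch_size], queue[batch_size:]
        # in the real store this is a single SELECT ... WHERE hash IN (...)
        rows = {h: table[h] for h in batch}
        for node_hash in batch:
            row = rows[node_hash]
            if row is None:
                # the first leaf seen in BFS order is at minimum height
                return node_hash
            queue.extend(row)
    raise ValueError("tree has no leaves")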
--------- Co-authored-by: Kyle Altendorf --- .../_tests/core/data_layer/test_data_store.py | 128 ++++++++++++++++++ chia/data_layer/data_store.py | 43 ++++-- 2 files changed, 159 insertions(+), 12 deletions(-) diff --git a/chia/_tests/core/data_layer/test_data_store.py b/chia/_tests/core/data_layer/test_data_store.py index 0e04d36310bf..b7e390073bc1 100644 --- a/chia/_tests/core/data_layer/test_data_store.py +++ b/chia/_tests/core/data_layer/test_data_store.py @@ -2147,3 +2147,131 @@ async def test_sparse_ancestor_table(data_store: DataStore, store_id: bytes32) - assert current_generation_count == 15 assert previous_generation_count == 184 + + +async def get_all_nodes(data_store: DataStore, store_id: bytes32) -> List[Node]: + root = await data_store.get_tree_root(store_id) + assert root.node_hash is not None + root_node = await data_store.get_node(root.node_hash) + nodes: List[Node] = [] + queue: List[Node] = [root_node] + + while len(queue) > 0: + node = queue.pop(0) + nodes.append(node) + if isinstance(node, InternalNode): + left_node = await data_store.get_node(node.left_hash) + right_node = await data_store.get_node(node.right_hash) + queue.append(left_node) + queue.append(right_node) + + return nodes + + +@pytest.mark.anyio +async def test_get_nodes(data_store: DataStore, store_id: bytes32) -> None: + num_values = 50 + changelist: List[Dict[str, Any]] = [] + + for value in range(num_values): + value_bytes = value.to_bytes(4, byteorder="big") + changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes}) + await data_store.insert_batch( + store_id=store_id, + changelist=changelist, + status=Status.COMMITTED, + ) + + expected_nodes = await get_all_nodes(data_store, store_id) + nodes = await data_store.get_nodes([node.hash for node in expected_nodes]) + assert nodes == expected_nodes + + node_hash = bytes32([0] * 32) + node_hash_2 = bytes32([0] * 31 + [1]) + with pytest.raises(Exception, match=f"^Nodes not found for hashes: {node_hash.hex()}, {node_hash_2.hex()}"): + await data_store.get_nodes([node_hash, node_hash_2] + [node.hash for node in expected_nodes]) + + +@pytest.mark.anyio +@pytest.mark.parametrize("pre", [0, 2048]) +@pytest.mark.parametrize("batch_size", [25, 100, 500]) +async def test_get_leaf_at_minimum_height( + data_store: DataStore, + store_id: bytes32, + pre: int, + batch_size: int, +) -> None: + num_values = 1000 + value_offset = 1000000 + all_min_leafs: Set[TerminalNode] = set() + + if pre > 0: + # This builds a complete binary tree, in order to test more than one batch in the queue before finding the leaf + changelist: List[Dict[str, Any]] = [] + + for value in range(pre): + value_bytes = (value * value).to_bytes(8, byteorder="big") + changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes}) + await data_store.insert_batch( + store_id=store_id, + changelist=changelist, + status=Status.COMMITTED, + ) + + for value in range(num_values): + value_bytes = value.to_bytes(4, byteorder="big") + # Use autoinsert instead of `insert_batch` to get a more randomly shaped tree + await data_store.autoinsert( + key=value_bytes, + value=value_bytes, + store_id=store_id, + status=Status.COMMITTED, + ) + + if (value + 1) % batch_size == 0: + hash_to_parent: Dict[bytes32, InternalNode] = {} + root = await data_store.get_tree_root(store_id) + assert root.node_hash is not None + min_leaf = await data_store.get_leaf_at_minimum_height(root.node_hash, hash_to_parent) + all_nodes = await get_all_nodes(data_store, store_id) + heights: Dict[bytes32, 
int] = {} + heights[root.node_hash] = 0 + min_leaf_height = None + + for node in all_nodes: + if isinstance(node, InternalNode): + heights[node.left_hash] = heights[node.hash] + 1 + heights[node.right_hash] = heights[node.hash] + 1 + else: + if min_leaf_height is not None: + min_leaf_height = min(min_leaf_height, heights[node.hash]) + else: + min_leaf_height = heights[node.hash] + + assert min_leaf_height is not None + if pre > 0: + assert min_leaf_height >= 11 + for node in all_nodes: + if isinstance(node, TerminalNode): + assert node == min_leaf + assert heights[min_leaf.hash] == min_leaf_height + break + if node.left_hash in hash_to_parent: + assert hash_to_parent[node.left_hash] == node + if node.right_hash in hash_to_parent: + assert hash_to_parent[node.right_hash] == node + + # Push down the min height leaf, so on the next iteration we get a different leaf + pushdown_height = 20 + for repeat in range(pushdown_height): + value_bytes = (value + (repeat + 1) * value_offset).to_bytes(4, byteorder="big") + await data_store.insert( + key=value_bytes, + value=value_bytes, + store_id=store_id, + reference_node_hash=min_leaf.hash, + side=Side.RIGHT, + status=Status.COMMITTED, + ) + assert min_leaf not in all_min_leafs + all_min_leafs.add(min_leaf) diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index 741a6495a854..a19b66252ac8 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -1395,23 +1395,42 @@ async def clean_node_table(self, writer: Optional[aiosqlite.Connection] = None) else: await writer.execute(query, params) + async def get_nodes(self, node_hashes: List[bytes32]) -> List[Node]: + query_parameter_place_holders = ",".join("?" for _ in node_hashes) + async with self.db_wrapper.reader() as reader: + # TODO: handle SQLITE_MAX_VARIABLE_NUMBER + cursor = await reader.execute( + f"SELECT * FROM node WHERE hash IN ({query_parameter_place_holders})", + [*node_hashes], + ) + rows = await cursor.fetchall() + + hash_to_node = {row["hash"]: row_to_node(row=row) for row in rows} + + missing_hashes = [node_hash.hex() for node_hash in node_hashes if node_hash not in hash_to_node] + if missing_hashes: + raise Exception(f"Nodes not found for hashes: {', '.join(missing_hashes)}") + + return [hash_to_node[node_hash] for node_hash in node_hashes] + async def get_leaf_at_minimum_height( self, root_hash: bytes32, hash_to_parent: Dict[bytes32, InternalNode] ) -> TerminalNode: - root_node = await self.get_node(root_hash) - queue: List[Node] = [root_node] + queue: List[bytes32] = [root_hash] + batch_size = min(500, SQLITE_MAX_VARIABLE_NUMBER - 10) + while True: assert len(queue) > 0 - node = queue.pop(0) - if isinstance(node, InternalNode): - left_node = await self.get_node(node.left_hash) - right_node = await self.get_node(node.right_hash) - hash_to_parent[left_node.hash] = node - hash_to_parent[right_node.hash] = node - queue.append(left_node) - queue.append(right_node) - elif isinstance(node, TerminalNode): - return node + nodes = await self.get_nodes(queue[:batch_size]) + queue = queue[batch_size:] + + for node in nodes: + if isinstance(node, TerminalNode): + return node + hash_to_parent[node.left_hash] = node + hash_to_parent[node.right_hash] = node + queue.append(node.left_hash) + queue.append(node.right_hash) async def batch_upsert( self, From bcbc7a7b50cf1a3a81b7548e602412ea2167f26f Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Wed, 26 Jun 2024 09:22:48 -0700 Subject: [PATCH 30/77] [CHIA-819] Fix 12-word mnemonic support in keychain (#18246) 
Fix 12 word mnemonic support in keychain --- chia/_tests/core/util/test_keychain.py | 121 +++++++++++++++++-------- chia/util/keychain.py | 2 +- 2 files changed, 85 insertions(+), 38 deletions(-) diff --git a/chia/_tests/core/util/test_keychain.py b/chia/_tests/core/util/test_keychain.py index 87c26dc0b80d..06e6217c0ec0 100644 --- a/chia/_tests/core/util/test_keychain.py +++ b/chia/_tests/core/util/test_keychain.py @@ -2,7 +2,7 @@ import json import random -from dataclasses import replace +from dataclasses import dataclass, replace from typing import Callable, List, Optional, Tuple import importlib_resources @@ -32,21 +32,55 @@ mnemonic_to_seed, ) -mnemonic = ( - "rapid this oven common drive ribbon bulb urban uncover napkin kitten usage enforce uncle unveil scene " - "apart wire mystery torch peanut august flee fantasy" + +@dataclass +class KeyInfo: + mnemonic: str + entropy: bytes + private_key: PrivateKey + fingerprint: uint32 + public_key: G1Element + bech32: str + + +_24keyinfo = KeyInfo( + mnemonic=( + "rapid this oven common drive ribbon bulb urban uncover napkin kitten usage enforce uncle unveil scene " + "apart wire mystery torch peanut august flee fantasy" + ), + entropy=bytes.fromhex("b1fc1a7717343572077f7aecb25ded77c4a3d93b9e040a5f8649f2aa1e1e5632"), + private_key=PrivateKey.from_bytes( + bytes.fromhex("6c6bb4cc3dae03b8d0b327dd6765834464a883f7ca7df134970842055efe8afc") + ), + fingerprint=uint32(1310648153), + public_key=G1Element.from_bytes( + bytes.fromhex( + "b5acf3599bc5fa5da1c00f6cc3d5bcf1560def67778b7f50a8c373a83f78761505b6250ab776e38a292e26628009aec4" + ) + ), + bech32="bls12381kkk0xkvmcha9mgwqpakv84du79tqmmm8w79h759gcde6s0mcwc2std39p2mhdcu29yhzvc5qpxhvgmknyl7", ) -entropy = bytes.fromhex("b1fc1a7717343572077f7aecb25ded77c4a3d93b9e040a5f8649f2aa1e1e5632") -private_key = PrivateKey.from_bytes(bytes.fromhex("6c6bb4cc3dae03b8d0b327dd6765834464a883f7ca7df134970842055efe8afc")) -fingerprint = uint32(1310648153) -public_key = G1Element.from_bytes( - bytes.fromhex("b5acf3599bc5fa5da1c00f6cc3d5bcf1560def67778b7f50a8c373a83f78761505b6250ab776e38a292e26628009aec4") +_12keyinfo = KeyInfo( + mnemonic=("steak rely trumpet cake banner easy consider cream marriage harvest truly shrimp"), + entropy=bytes.fromhex("d516afa61021248b8bd197884d2fa5e3"), + private_key=PrivateKey.from_bytes( + bytes.fromhex("3aaec6598281320c4918a2d6ebf4c2bacabad5f85a45569fc3ba5159e13f94bf") + ), + fingerprint=uint32(688295223), + public_key=G1Element.from_bytes( + bytes.fromhex( + "a9e652cb551d5978a9ee4b7aa52a4e826078a54b08a3d903c38611cb8a804a9a29c926e4f8549314a079e04ecde10cc1" + ) + ), + bech32="bls12381148n99j64r4vh320wfda222jwsfs83f2tpz3ajq7rscguhz5qf2dznjfxunu9fyc55pu7qnkduyxvzqskawt", ) -bech32_pubkey = "bls12381kkk0xkvmcha9mgwqpakv84du79tqmmm8w79h759gcde6s0mcwc2std39p2mhdcu29yhzvc5qpxhvgmknyl7" class TestKeychain: - def test_basic_add_delete(self, empty_temp_file_keyring: TempKeyring, seeded_random: random.Random): + @pytest.mark.parametrize("key_info", [_24keyinfo, _12keyinfo]) + def test_basic_add_delete( + self, key_info: KeyInfo, empty_temp_file_keyring: TempKeyring, seeded_random: random.Random + ): kc: Keychain = Keychain(user="testing-1.8.0", service="chia-testing-1.8.0") kc.delete_all_keys() @@ -55,7 +89,7 @@ def test_basic_add_delete(self, empty_temp_file_keyring: TempKeyring, seeded_ran assert kc.get_first_private_key() is None assert kc.get_first_public_key() is None - mnemonic = generate_mnemonic() + mnemonic = key_info.mnemonic entropy = bytes_from_mnemonic(mnemonic) assert 
bytes_to_mnemonic(entropy) == mnemonic mnemonic_2 = generate_mnemonic() @@ -96,10 +130,10 @@ def test_basic_add_delete(self, empty_temp_file_keyring: TempKeyring, seeded_ran assert kc._get_free_private_key_index() == 0 assert len(kc.get_all_private_keys()) == 0 - kc.add_key(bech32_pubkey, label=None, private=False) + kc.add_key(key_info.bech32, label=None, private=False) all_pks = kc.get_all_public_keys() assert len(all_pks) == 1 - assert all_pks[0] == public_key + assert all_pks[0] == key_info.public_key kc.delete_all_keys() kc.add_key(bytes_to_mnemonic(bytes32.random(seeded_random))) @@ -219,14 +253,17 @@ def test_key_data_secrets_generate() -> None: @pytest.mark.parametrize( - "input_data, from_method", [(mnemonic, KeyDataSecrets.from_mnemonic), (entropy, KeyDataSecrets.from_entropy)] + "get_item, from_method", [("mnemonic", KeyDataSecrets.from_mnemonic), ("entropy", KeyDataSecrets.from_entropy)] ) -def test_key_data_secrets_creation(input_data: object, from_method: Callable[..., KeyDataSecrets]) -> None: - secrets = from_method(input_data) - assert secrets.mnemonic == mnemonic.split() - assert secrets.mnemonic_str() == mnemonic - assert secrets.entropy == entropy - assert secrets.private_key == private_key +@pytest.mark.parametrize("key_info", [_24keyinfo, _12keyinfo]) +def test_key_data_secrets_creation( + key_info: KeyInfo, get_item: str, from_method: Callable[..., KeyDataSecrets] +) -> None: + secrets = from_method(getattr(key_info, get_item)) + assert secrets.mnemonic == key_info.mnemonic.split() + assert secrets.mnemonic_str() == key_info.mnemonic + assert secrets.entropy == key_info.entropy + assert secrets.private_key == key_info.private_key @pytest.mark.parametrize("label", [None, "key"]) @@ -241,21 +278,23 @@ def test_key_data_generate(label: Optional[str]) -> None: @pytest.mark.parametrize("label", [None, "key"]) @pytest.mark.parametrize( - "input_data, from_method", [(mnemonic, KeyData.from_mnemonic), (entropy, KeyData.from_entropy)] + "get_item, from_method", [("mnemonic", KeyData.from_mnemonic), ("entropy", KeyData.from_entropy)] ) -def test_key_data_creation(input_data: object, from_method: Callable[..., KeyData], label: Optional[str]) -> None: - key_data = from_method(input_data, label) - assert key_data.fingerprint == fingerprint - assert key_data.public_key == public_key - assert key_data.mnemonic == mnemonic.split() - assert key_data.mnemonic_str() == mnemonic - assert key_data.entropy == entropy - assert key_data.private_key == private_key +@pytest.mark.parametrize("key_info", [_24keyinfo, _12keyinfo]) +def test_key_data_creation(label: str, key_info: KeyInfo, get_item: str, from_method: Callable[..., KeyData]) -> None: + key_data = from_method(getattr(key_info, get_item), label) + assert key_data.fingerprint == key_info.fingerprint + assert key_data.public_key == key_info.public_key + assert key_data.mnemonic == key_info.mnemonic.split() + assert key_data.mnemonic_str() == key_info.mnemonic + assert key_data.entropy == key_info.entropy + assert key_data.private_key == key_info.private_key assert key_data.label == label -def test_key_data_without_secrets() -> None: - key_data = KeyData(fingerprint, public_key, None, None) +@pytest.mark.parametrize("key_info", [_24keyinfo, _12keyinfo]) +def test_key_data_without_secrets(key_info: KeyInfo) -> None: + key_data = KeyData(key_info.fingerprint, key_info.public_key, None, None) assert key_data.secrets is None with pytest.raises(KeychainSecretsMissing): @@ -274,9 +313,9 @@ def test_key_data_without_secrets() -> None: 
@pytest.mark.parametrize( "input_data, data_type", [ - ((mnemonic.split()[:-1], entropy, private_key), "mnemonic"), - ((mnemonic.split(), KeyDataSecrets.generate().entropy, private_key), "entropy"), - ((mnemonic.split(), entropy, KeyDataSecrets.generate().private_key), "private_key"), + ((_24keyinfo.mnemonic.split()[:-1], _24keyinfo.entropy, _24keyinfo.private_key), "mnemonic"), + ((_24keyinfo.mnemonic.split(), KeyDataSecrets.generate().entropy, _24keyinfo.private_key), "entropy"), + ((_24keyinfo.mnemonic.split(), _24keyinfo.entropy, KeyDataSecrets.generate().private_key), "private_key"), ], ) def test_key_data_secrets_post_init(input_data: Tuple[List[str], bytes, PrivateKey], data_type: str) -> None: @@ -287,8 +326,16 @@ def test_key_data_secrets_post_init(input_data: Tuple[List[str], bytes, PrivateK @pytest.mark.parametrize( "input_data, data_type", [ - ((fingerprint, G1Element(), None, KeyDataSecrets(mnemonic.split(), entropy, private_key)), "public_key"), - ((fingerprint, G1Element(), None, None), "fingerprint"), + ( + ( + _24keyinfo.fingerprint, + G1Element(), + None, + KeyDataSecrets(_24keyinfo.mnemonic.split(), _24keyinfo.entropy, _24keyinfo.private_key), + ), + "public_key", + ), + ((_24keyinfo.fingerprint, G1Element(), None, None), "fingerprint"), ], ) def test_key_data_post_init( diff --git a/chia/util/keychain.py b/chia/util/keychain.py index fcf7918ffc80..b7c16e73ab19 100644 --- a/chia/util/keychain.py +++ b/chia/util/keychain.py @@ -316,7 +316,7 @@ def _get_key_data(self, index: int, include_secrets: bool = True) -> KeyData: public_key = G1Element.from_bytes(str_bytes[: G1Element.SIZE]) fingerprint = public_key.get_fingerprint() - if len(str_bytes) == G1Element.SIZE + 32: + if len(str_bytes) > G1Element.SIZE: entropy = str_bytes[G1Element.SIZE : G1Element.SIZE + 32] else: entropy = None From b6b61999ea5ace43ea4d625fa2156593081d93df Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 26 Jun 2024 12:23:15 -0400 Subject: [PATCH 31/77] correct more macos casing (#18249) --- .github/workflows/build-macos-installers.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index 1e70511e45cd..217458ec887f 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -357,7 +357,7 @@ jobs: run: | aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent s3://download.chia.net/torrents/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent + aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent s3://download.chia.net/torrents/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent - name: Upload release artifacts if: env.RELEASE == 'true' From abcbb97a53d87651ffe01a75fedb8f93f8db5cdf Mon Sep 17 00:00:00 2001 From: Earle Lowe 
<30607889+emlowe@users.noreply.github.com> Date: Thu, 27 Jun 2024 11:47:22 -0700 Subject: [PATCH 32/77] Update to macos-12 for build and remove macos-11 (cherrypick from main) (#18257) CHIA-802: Update to macos-12 for build and remove macos-11 (#18238) * Update to macos-12 for build and remove macos-11 * Update MACOSX_DEPLOYMENT_TARGET --- .github/workflows/build-macos-installers.yml | 9 ++------- .github/workflows/check_wheel_availability.yaml | 2 +- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index 217458ec887f..edc111dd1d73 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -49,7 +49,7 @@ jobs: matrix: python-version: ["3.10"] os: - - runs-on: macos-11 + - runs-on: macos-12 name: intel bladebit-suffix: macos-x86-64.tar.gz - runs-on: [MacOS, ARM64] @@ -74,7 +74,7 @@ jobs: uses: Chia-Network/actions/setjobenv@main env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - MACOSX_DEPLOYMENT_TARGET: 11 + MACOSX_DEPLOYMENT_TARGET: 12 - name: Check tag type shell: bash @@ -391,11 +391,6 @@ jobs: fail-fast: false matrix: os: - - name: 11 - matrix: 11 - runs-on: - intel: macos-11 - arm: [macos, arm64] - name: 12 matrix: 12 runs-on: diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 214e97d9f12e..04f85d37430f 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -33,7 +33,7 @@ jobs: - name: macOS matrix: macos runs-on: - intel: macos-11 + intel: macos-12 arm: [macos, arm64] - name: Windows matrix: windows From 03e2444f580031a9c561b09b19f4607cd620c08c Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 28 Jun 2024 10:42:06 -0400 Subject: [PATCH 33/77] avoid some diff coverage processing failures (#18196) --- .github/workflows/test.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 90a8b140c077..6f49bb5340a4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -284,11 +284,17 @@ jobs: set -o pipefail coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/_tests/**/*' --show-missing | tee coverage-reports/coverage-tests-stdout + - name: Identify parent commit + id: parent-commit + run: | + echo hash=$(git rev-parse HEAD~1) >> "$GITHUB_OUTPUT" + - name: Coverage report (diff) if: (github.base_ref != '' || github.event.before != '') && always() env: - compare-branch: ${{ github.base_ref == '' && github.event.before || format('origin/{0}', github.base_ref) }} + compare-branch: ${{ github.base_ref == '' && steps.parent-commit.hash || format('origin/{0}', github.base_ref) }} run: | + set -o pipefail diff-cover --config-file=.diffcover.toml --compare-branch=${{ env.compare-branch }} --fail-under=100 --html-report=coverage-reports/diff-cover.html --markdown-report=coverage-reports/diff-cover.md coverage-reports/coverage.xml | tee coverage-reports/diff-cover-stdout COV_STATUS="${PIPESTATUS[0]}" echo "COV_STATUS=$COV_STATUS" >> "$GITHUB_ENV" From 0970ba70dac60d753ea16d0908a6dc415dc6de56 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Fri, 28 Jun 2024 11:44:44 -0700 Subject: [PATCH 34/77] Fix backwards compatibility for add_private_key (cherrypick from main) (#18256) Fix backwards compatibility for `add_private_key` (#18237) * Fix 
backwards compatibility for `add_private_key` * Another backwards compatibility fix Co-authored-by: Matt Hauff --- chia/daemon/keychain_server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/daemon/keychain_server.py b/chia/daemon/keychain_server.py index b5b506c560f1..57edc5c387e4 100644 --- a/chia/daemon/keychain_server.py +++ b/chia/daemon/keychain_server.py @@ -174,9 +174,9 @@ def get_keychain_for_request(self, request: Dict[str, Any]) -> Keychain: async def handle_command(self, command: str, data: Dict[str, Any]) -> Dict[str, Any]: try: if command == "add_private_key": - return await self.add_key( - {"mnemonic_or_pk": data.get("mnemonic", None), "label": data.get("label", None), "private": True} - ) + data["private"] = True + data["mnemonic_or_pk"] = data.get("mnemonic_or_pk", data.get("mnemonic", None)) + return await self.add_key(data) elif command == "add_key": return await self.add_key(data) elif command == "check_keys": From ef09cafadede693262985b88d0944a03116c618f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 09:31:04 -0700 Subject: [PATCH 35/77] CA Cert updates (#18265) adding ca updates Co-authored-by: ChiaAutomation --- mozilla-ca | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mozilla-ca b/mozilla-ca index 0d3a67c2d346..7da6b4833244 160000 --- a/mozilla-ca +++ b/mozilla-ca @@ -1 +1 @@ -Subproject commit 0d3a67c2d3461b74903a951292c95733166a5bad +Subproject commit 7da6b48332442b0936ccd2bea649ccba449b9d8b From 7245d47c77a503ac624694976c4b19c6eaf0b51c Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Wed, 3 Jul 2024 17:22:13 +0100 Subject: [PATCH 36/77] CHIA-843 Simplify add_to_pool around DB operations (#18259) Simplify add_to_pool around DB operations. --- chia/full_node/mempool.py | 119 +++++++++++++++++++------------------- 1 file changed, 58 insertions(+), 61 deletions(-) diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py index 85998af83c17..f8c8067f9874 100644 --- a/chia/full_node/mempool.py +++ b/chia/full_node/mempool.py @@ -371,67 +371,66 @@ def add_to_pool(self, item: MempoolItem) -> MempoolAddInfo: removals: List[MempoolRemoveInfo] = [] - with self._db_conn: - # we have certain limits on transactions that will expire soon - # (in the next 15 minutes) - block_cutoff = self._block_height + 48 - time_cutoff = self._timestamp + 900 - if (item.assert_before_height is not None and item.assert_before_height < block_cutoff) or ( - item.assert_before_seconds is not None and item.assert_before_seconds < time_cutoff - ): - # this lists only transactions that expire soon, in order of - # lowest fee rate along with the cumulative cost of such - # transactions counting from highest to lowest fee rate - cursor = self._db_conn.execute( - """ - SELECT name, - fee_per_cost, - SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS cumulative_cost - FROM tx - WHERE assert_before_seconds IS NOT NULL AND assert_before_seconds < ? - OR assert_before_height IS NOT NULL AND assert_before_height < ? 
- ORDER BY cumulative_cost DESC - """, - (time_cutoff, block_cutoff), - ) - to_remove: List[bytes32] = [] - for row in cursor: - name, fee_per_cost, cumulative_cost = row - - # there's space for us, stop pruning - if cumulative_cost + item.cost <= self.mempool_info.max_block_clvm_cost: - break + # we have certain limits on transactions that will expire soon + # (in the next 15 minutes) + block_cutoff = self._block_height + 48 + time_cutoff = self._timestamp + 900 + if (item.assert_before_height is not None and item.assert_before_height < block_cutoff) or ( + item.assert_before_seconds is not None and item.assert_before_seconds < time_cutoff + ): + # this lists only transactions that expire soon, in order of + # lowest fee rate along with the cumulative cost of such + # transactions counting from highest to lowest fee rate + cursor = self._db_conn.execute( + """ + SELECT name, + fee_per_cost, + SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS cumulative_cost + FROM tx + WHERE assert_before_seconds IS NOT NULL AND assert_before_seconds < ? + OR assert_before_height IS NOT NULL AND assert_before_height < ? + ORDER BY cumulative_cost DESC + """, + (time_cutoff, block_cutoff), + ) + to_remove: List[bytes32] = [] + for row in cursor: + name, fee_per_cost, cumulative_cost = row - # we can't evict any more transactions, abort (and don't - # evict what we put aside in "to_remove" list) - if fee_per_cost > item.fee_per_cost: - return MempoolAddInfo([], Err.INVALID_FEE_LOW_FEE) - to_remove.append(name) + # there's space for us, stop pruning + if cumulative_cost + item.cost <= self.mempool_info.max_block_clvm_cost: + break - removals.append(self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED)) + # we can't evict any more transactions, abort (and don't + # evict what we put aside in "to_remove" list) + if fee_per_cost > item.fee_per_cost: + return MempoolAddInfo([], Err.INVALID_FEE_LOW_FEE) + to_remove.append(name) - # if we don't find any entries, it's OK to add this entry + removals.append(self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED)) - if self._total_cost + item.cost > self.mempool_info.max_size_in_cost: - # pick the items with the lowest fee per cost to remove - cursor = self._db_conn.execute( - """SELECT name FROM tx - WHERE name NOT IN ( - SELECT name FROM ( - SELECT name, - SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS total_cost - FROM tx) AS tx_with_cost - WHERE total_cost <= ?) - """, - (self.mempool_info.max_size_in_cost - item.cost,), - ) - to_remove = [bytes32(row[0]) for row in cursor] + # if we don't find any entries, it's OK to add this entry - removals.append(self.remove_from_pool(to_remove, MempoolRemoveReason.POOL_FULL)) + if self._total_cost + item.cost > self.mempool_info.max_size_in_cost: + # pick the items with the lowest fee per cost to remove + cursor = self._db_conn.execute( + """SELECT name FROM tx + WHERE name NOT IN ( + SELECT name FROM ( + SELECT name, + SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS total_cost + FROM tx) AS tx_with_cost + WHERE total_cost <= ?) 
+ """, + (self.mempool_info.max_size_in_cost - item.cost,), + ) + to_remove = [bytes32(row[0]) for row in cursor] + removals.append(self.remove_from_pool(to_remove, MempoolRemoveReason.POOL_FULL)) + with self._db_conn as conn: # TODO: In the future, for the "fee_per_cost" field, opt for # "GENERATED ALWAYS AS (CAST(fee AS REAL) / cost) VIRTUAL" - self._db_conn.execute( + conn.execute( "INSERT INTO " "tx(name,cost,fee,assert_height,assert_before_height,assert_before_seconds,fee_per_cost) " "VALUES(?, ?, ?, ?, ?, ?, ?)", @@ -445,16 +444,14 @@ def add_to_pool(self, item: MempoolItem) -> MempoolAddInfo: item.fee / item.cost, ), ) - all_coin_spends = [(s.coin_id, item.name) for s in item.npc_result.conds.spends] - self._db_conn.executemany("INSERT INTO spends VALUES(?, ?)", all_coin_spends) + conn.executemany("INSERT INTO spends VALUES(?, ?)", all_coin_spends) - self._items[item.name] = InternalMempoolItem( - item.spend_bundle, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends - ) - - self._total_cost += item.cost - self._total_fee += item.fee + self._items[item.name] = InternalMempoolItem( + item.spend_bundle, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends + ) + self._total_cost += item.cost + self._total_fee += item.fee info = FeeMempoolInfo(self.mempool_info, self.total_mempool_cost(), self.total_mempool_fees(), datetime.now()) self.fee_estimator.add_mempool_item(info, MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool)) From 8141fe40e21c6653568a496b53773ad83d1f77e6 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Wed, 3 Jul 2024 17:22:25 +0100 Subject: [PATCH 37/77] CHIA-826 Simplify get_items_by_coin_id and make it return an iterator (#18255) Simplify get_items_by_coin_id and make it return an iterator. 
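As a rough sketch of the pattern this change adopts (simplified, hypothetical helpers, not code from the patch; only the `tx`/`spends` table names follow the diff): the lookup yields rows lazily from the cursor instead of materializing a list, and callers that still want a list simply wrap the call in `list(...)`, as the test updates below show.

import sqlite3
from typing import Iterator, List, Tuple

Row = Tuple[bytes, ...]


def items_for_coin_as_list(conn: sqlite3.Connection, coin_id: bytes) -> List[Row]:
    # old shape: build the whole list before returning
    cursor = conn.execute(
        "SELECT * FROM tx WHERE name IN (SELECT tx FROM spends WHERE coin_id = ?)",
        (coin_id,),
    )
    return list(cursor)


def items_for_coin_as_iterator(conn: sqlite3.Connection, coin_id: bytes) -> Iterator[Row]:
    # new shape: yield rows as the cursor produces them; consumption is lazy
    cursor = conn.execute(
        "SELECT * FROM tx WHERE name IN (SELECT tx FROM spends WHERE coin_id = ?)",
        (coin_id,),
    )
    yield from cursor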
--- .../core/mempool/test_mempool_manager.py | 4 ++-- chia/full_node/mempool.py | 20 +++++++++++++------ chia/full_node/mempool_manager.py | 2 +- chia/rpc/full_node_rpc_api.py | 3 +-- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index 6af883e87695..df5f10754ed8 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -1548,7 +1548,7 @@ async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]: mempool_manager, [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, i]], coin ) assert result[1] == MempoolInclusionStatus.SUCCESS - assert len(mempool_manager.mempool.get_items_by_coin_id(coin_id)) == 3 + assert len(list(mempool_manager.mempool.get_items_by_coin_id(coin_id))) == 3 assert mempool_manager.mempool.size() == 3 assert len(list(mempool_manager.mempool.items_by_feerate())) == 3 # Setup a new peak where the incoming block has spent the coin @@ -1558,7 +1558,7 @@ async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]: await mempool_manager.new_peak(block_record, [coin_id]) invariant_check_mempool(mempool_manager.mempool) # As the coin was a spend in all the mempool items we had, nothing should be left now - assert len(mempool_manager.mempool.get_items_by_coin_id(coin_id)) == 0 + assert len(list(mempool_manager.mempool.get_items_by_coin_id(coin_id))) == 0 assert mempool_manager.mempool.size() == 0 assert len(list(mempool_manager.mempool.items_by_feerate())) == 0 diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py index f8c8067f9874..edf0b3771891 100644 --- a/chia/full_node/mempool.py +++ b/chia/full_node/mempool.py @@ -246,13 +246,21 @@ def get_item_by_id(self, item_id: bytes32) -> Optional[MempoolItem]: return None if row is None else self._row_to_item(row) # TODO: we need a bulk lookup function like this too - def get_items_by_coin_id(self, spent_coin_id: bytes32) -> List[MempoolItem]: - with self._db_conn: - cursor = self._db_conn.execute( - "SELECT * FROM tx WHERE name in (SELECT tx FROM spends WHERE coin_id=?)", - (spent_coin_id,), + def get_items_by_coin_id(self, spent_coin_id: bytes32) -> Iterator[MempoolItem]: + cursor = self._db_conn.execute( + """ + SELECT * + FROM tx + WHERE name IN ( + SELECT tx + FROM spends + WHERE coin_id = ? 
) - return [self._row_to_item(row) for row in cursor] + """, + (spent_coin_id,), + ) + for row in cursor: + yield self._row_to_item(row) def get_items_by_coin_ids(self, spent_coin_ids: List[bytes32]) -> List[MempoolItem]: items: List[MempoolItem] = [] diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index feba2bb506bb..7a7737ee94fd 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -710,7 +710,7 @@ async def new_peak( # to deduplicate spendbundle_ids_to_remove: Set[bytes32] = set() for spend in spent_coins: - items: List[MempoolItem] = self.mempool.get_items_by_coin_id(spend) + items = self.mempool.get_items_by_coin_id(spend) for item in items: included_items.append(MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool)) self.remove_seen(item.name) diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py index 83209ed7daef..c0f00c567278 100644 --- a/chia/rpc/full_node_rpc_api.py +++ b/chia/rpc/full_node_rpc_api.py @@ -23,7 +23,6 @@ from chia.types.full_block import FullBlock from chia.types.generator_types import BlockGenerator from chia.types.mempool_inclusion_status import MempoolInclusionStatus -from chia.types.mempool_item import MempoolItem from chia.types.spend_bundle import SpendBundle from chia.types.unfinished_header_block import UnfinishedHeaderBlock from chia.util.byte_types import hexstr_to_bytes @@ -833,7 +832,7 @@ async def get_mempool_items_by_coin_name(self, request: Dict[str, Any]) -> Endpo raise ValueError("No coin_name in request") coin_name: bytes32 = bytes32.from_hexstr(request["coin_name"]) - items: List[MempoolItem] = self.service.mempool_manager.mempool.get_items_by_coin_id(coin_name) + items = self.service.mempool_manager.mempool.get_items_by_coin_id(coin_name) return {"mempool_items": [item.to_json_dict() for item in items]} From bd750f9e94b083fa85b5214333d4ebc661d95ecc Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Wed, 3 Jul 2024 17:22:46 +0100 Subject: [PATCH 38/77] CHIA-817 Properly order HARD_FORK_2_0 and SOFT_FORK_4 in ConsensusMode (#18242) Properly order HARD_FORK_2_0 and SOFT_FORK_4 in ConsensusMode. --- chia/_tests/blockchain/test_blockchain.py | 2 +- chia/_tests/conftest.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index a97610d3fdf8..ad87e11f59fd 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -3269,7 +3269,7 @@ async def test_get_tx_peak_reorg( ) -> None: b = empty_blockchain - if consensus_mode < ConsensusMode.SOFT_FORK_4: + if consensus_mode < ConsensusMode.HARD_FORK_2_0: reorg_point = 13 else: reorg_point = 12 diff --git a/chia/_tests/conftest.py b/chia/_tests/conftest.py index aca1c38b788c..9cd5aa7c27c2 100644 --- a/chia/_tests/conftest.py +++ b/chia/_tests/conftest.py @@ -196,8 +196,8 @@ def get_keychain(): class ConsensusMode(ComparableEnum): PLAIN = 0 - SOFT_FORK_4 = 1 - HARD_FORK_2_0 = 2 + HARD_FORK_2_0 = 1 + SOFT_FORK_4 = 2 SOFT_FORK_5 = 3 From 9b5053b73233ebfdc6db145d84087d9c81e82060 Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Fri, 5 Jul 2024 19:49:29 +0300 Subject: [PATCH 39/77] [CHIA-884] Fix timelord log spam. 
(#18267) --- chia/timelord/timelord.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index 21aca2993630..dddec0938adc 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -430,6 +430,7 @@ async def _submit_iterations(self) -> None: for iteration in self.iters_to_submit[chain]: if iteration in self.iters_submitted[chain]: continue + self.iters_submitted[chain].append(iteration) log.debug(f"Submitting iterations to {chain}: {iteration}") assert iteration > 0 prefix = str(len(str(iteration))) @@ -438,7 +439,6 @@ async def _submit_iterations(self) -> None: iter_str = prefix + str(iteration) writer.write(iter_str.encode()) await writer.drain() - self.iters_submitted[chain].append(iteration) def _clear_proof_list(self, iters: uint64) -> List[Tuple[Chain, VDFInfo, VDFProof, int]]: return [ From e6dbaef4c1653a1804c0eae58aa9672e5e000ea2 Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Fri, 5 Jul 2024 19:49:57 +0300 Subject: [PATCH 40/77] [CHIA-885] Send different uncompact buckets to blueboxes. (#18263) * Send different uncompact buckets to blueboxes. * Lint. * Add sleep interval before continue --- chia/full_node/full_node.py | 36 ++++++++++++++++++++++++++++-------- 1 file changed, 28 insertions(+), 8 deletions(-) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index f8715a1ae1cf..88a3902b596f 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2797,7 +2797,15 @@ async def broadcast_uncompact_blocks( broadcast_list: List[timelord_protocol.RequestCompactProofOfTime] = [] self.log.info("Getting random heights for bluebox to compact") - heights = await self.block_store.get_random_not_compactified(target_uncompact_proofs) + + if self._server is None: + self.log.info("Not broadcasting uncompact blocks, no server found") + await asyncio.sleep(uncompact_interval_scan) + continue + connected_timelords = self.server.get_connections(NodeType.TIMELORD) + + total_target_uncompact_proofs = target_uncompact_proofs * max(1, len(connected_timelords)) + heights = await self.block_store.get_random_not_compactified(total_target_uncompact_proofs) self.log.info("Heights found for bluebox to compact: [%s]", ", ".join(map(str, heights))) for h in heights: @@ -2870,17 +2878,29 @@ async def broadcast_uncompact_blocks( ) ) - if len(broadcast_list) > target_uncompact_proofs: - broadcast_list = broadcast_list[:target_uncompact_proofs] + broadcast_list_chunks: List[List[timelord_protocol.RequestCompactProofOfTime]] = [] + for index in range(0, len(broadcast_list), target_uncompact_proofs): + broadcast_list_chunks.append(broadcast_list[index : index + target_uncompact_proofs]) + if len(broadcast_list_chunks) == 0: + self.log.info("Did not find any uncompact blocks.") + await asyncio.sleep(uncompact_interval_scan) + continue if self.sync_store.get_sync_mode() or self.sync_store.get_long_sync(): + await asyncio.sleep(uncompact_interval_scan) continue if self._server is not None: self.log.info(f"Broadcasting {len(broadcast_list)} items to the bluebox") - msgs = [] - for new_pot in broadcast_list: - msg = make_msg(ProtocolMessageTypes.request_compact_proof_of_time, new_pot) - msgs.append(msg) - await self.server.send_to_all(msgs, NodeType.TIMELORD) + connected_timelords = self.server.get_connections(NodeType.TIMELORD) + chunk_index = 0 + for connection in connected_timelords: + peer_node_id = connection.peer_node_id + msgs = [] + broadcast_list = broadcast_list_chunks[chunk_index] + 
chunk_index = (chunk_index + 1) % len(broadcast_list_chunks) + for new_pot in broadcast_list: + msg = make_msg(ProtocolMessageTypes.request_compact_proof_of_time, new_pot) + msgs.append(msg) + await self.server.send_to_specific(msgs, peer_node_id) await asyncio.sleep(uncompact_interval_scan) except Exception as e: error_stack = traceback.format_exc() From 25154a10459749b30de66861e50f0bad1a8e55da Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Fri, 5 Jul 2024 19:50:17 +0300 Subject: [PATCH 41/77] [CHIA-883] Fix duplicate connection log spam. (#18268) * [CHIA-883] Fix duplicate connection log spam. * Add old condition too as a safety margin. --- chia/server/start_service.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/chia/server/start_service.py b/chia/server/start_service.py index 50c8afc79621..2accd724a38b 100644 --- a/chia/server/start_service.py +++ b/chia/server/start_service.py @@ -174,6 +174,11 @@ async def _connect_peers_task_handler(self) -> None: if any(connection.peer_info == resolved for connection in self._server.all_connections.values()): continue + if any( + connection.peer_info.host == resolved.host and connection.peer_server_port == resolved.port + for connection in self._server.all_connections.values() + ): + continue if not await self._server.start_client(resolved, None): self._log.info(f"Failed to connect to {resolved}") From 260c7a363a629f5596a3ccb688a300d2a95d1456 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Fri, 5 Jul 2024 18:52:03 +0200 Subject: [PATCH 42/77] [CHIA-786] simplify hard-fork consensus rules (#18208) the hard fork has activated. From now on we can simplify some of the updated consensus rules to apply unconditionally to block height --- chia/_tests/blockchain/test_blockchain.py | 35 ++-------- chia/_tests/core/full_node/test_conditions.py | 17 +---- chia/_tests/core/mempool/test_mempool.py | 66 +++---------------- chia/full_node/mempool_check_conditions.py | 29 +------- chia/simulator/block_tools.py | 18 ++--- 5 files changed, 28 insertions(+), 137 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index ad87e11f59fd..08e68638e4c9 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -5,7 +5,7 @@ import time from contextlib import asynccontextmanager from dataclasses import replace -from typing import AsyncIterator, Dict, List, Optional, Tuple +from typing import AsyncIterator, Dict, List, Optional import pytest from chia_rs import AugSchemeMPL, G2Element, MerkleSet @@ -2071,42 +2071,15 @@ async def test_timelock_conditions( ConditionOpcode.AGG_SIG_PARENT_PUZZLE, ], ) - @pytest.mark.parametrize( - "with_garbage,expected", - [ - (True, (AddBlockResult.INVALID_BLOCK, Err.INVALID_CONDITION, None)), - (False, (AddBlockResult.NEW_PEAK, None, 2)), - ], - ) + @pytest.mark.parametrize("with_garbage", [True, False]) async def test_aggsig_garbage( self, empty_blockchain: Blockchain, opcode: ConditionOpcode, with_garbage: bool, - expected: Tuple[AddBlockResult, Optional[Err], Optional[uint32]], bt: BlockTools, consensus_mode: ConsensusMode, ) -> None: - # in the 2.0 hard fork, we relax the strict 2-parameters rule of - # AGG_SIG_* conditions, in consensus mode. In mempool mode we always - # apply strict rules. 
- if consensus_mode >= ConsensusMode.HARD_FORK_2_0 and with_garbage: - expected = (AddBlockResult.NEW_PEAK, None, uint32(2)) - - # before the 2.0 hard fork, these conditions do not exist - # but WalletTool still lets us create them, and aggregate them into the - # block signature. When the pre-hard fork node sees them, the conditions - # are ignored, but the aggregate signature is corrupt. - if consensus_mode < ConsensusMode.HARD_FORK_2_0 and opcode in [ - ConditionOpcode.AGG_SIG_PARENT, - ConditionOpcode.AGG_SIG_PUZZLE, - ConditionOpcode.AGG_SIG_AMOUNT, - ConditionOpcode.AGG_SIG_PUZZLE_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_PUZZLE, - ]: - expected = (AddBlockResult.INVALID_BLOCK, Err.BAD_AGGREGATE_SIGNATURE, None) - b = empty_blockchain blocks = bt.get_consecutive_blocks( 3, @@ -2153,7 +2126,9 @@ async def test_aggsig_garbage( # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) res, error, state_change = await b.add_block(blocks[-1], repl_preval_results, None) - assert (res, error, state_change.fork_height if state_change else None) == expected + assert res == AddBlockResult.NEW_PEAK + assert error is None + assert state_change is not None and state_change.fork_height == uint32(2) @pytest.mark.anyio @pytest.mark.parametrize("with_garbage", [True, False]) diff --git a/chia/_tests/core/full_node/test_conditions.py b/chia/_tests/core/full_node/test_conditions.py index 6be002904ad0..b0042749ee32 100644 --- a/chia/_tests/core/full_node/test_conditions.py +++ b/chia/_tests/core/full_node/test_conditions.py @@ -144,10 +144,6 @@ async def test_unknown_conditions_with_cost( conditions = Program.to(assemble(f"(({opcode} 1337))")) additions, removals, new_block = await check_conditions(bt, conditions) - if consensus_mode < ConsensusMode.HARD_FORK_2_0: - # before the hard fork, all unknown conditions have 0 cost - expected_cost = 0 - # once the hard fork activates, blocks no longer pay the cost of the ROM # generator (which includes hashing all puzzles). 
if consensus_mode >= ConsensusMode.HARD_FORK_2_0: @@ -172,8 +168,6 @@ async def test_softfork_condition( additions, removals, new_block = await check_conditions(bt, conditions) if consensus_mode < ConsensusMode.HARD_FORK_2_0: - # the SOFTFORK condition is not recognized before the hard fork - expected_cost = 0 block_base_cost = 737056 else: # once the hard fork activates, blocks no longer pay the cost of the ROM @@ -533,16 +527,7 @@ async def test_agg_sig_illegal_suffix( assert c.AGG_SIG_PARENT_PUZZLE_ADDITIONAL_DATA == additional_data[ConditionOpcode.AGG_SIG_PARENT_PUZZLE] blocks = await initial_blocks(bt) - if consensus_mode < ConsensusMode.HARD_FORK_2_0 and opcode in [ - ConditionOpcode.AGG_SIG_PARENT, - ConditionOpcode.AGG_SIG_PUZZLE, - ConditionOpcode.AGG_SIG_AMOUNT, - ConditionOpcode.AGG_SIG_PUZZLE_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_PUZZLE, - ]: - expected_error = Err.BAD_AGGREGATE_SIGNATURE - elif opcode == ConditionOpcode.AGG_SIG_UNSAFE: + if opcode == ConditionOpcode.AGG_SIG_UNSAFE: expected_error = Err.INVALID_CONDITION else: expected_error = None diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 867d0c0c9ea8..124f5336e238 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -2200,15 +2200,15 @@ def test_invalid_condition_args_terminator(self, softfork_height: uint32) -> Non assert npc_result.conds.spends[0].seconds_relative == 50 @pytest.mark.parametrize( - "mempool,operand,expected", + "mempool,operand", [ - (True, -1, Err.GENERATOR_RUNTIME_ERROR.value), - (False, -1, Err.GENERATOR_RUNTIME_ERROR.value), - (True, 1, None), - (False, 1, None), + (True, -1), + (False, -1), + (True, 1), + (False, 1), ], ) - def test_div(self, mempool: bool, operand: int, expected: Optional[int], softfork_height: uint32) -> None: + def test_div(self, mempool: bool, operand: int, softfork_height: uint32) -> None: # op_div is disallowed on negative numbers in the mempool, and after the # softfork npc_result = generator_condition_tester( @@ -2218,11 +2218,8 @@ def test_div(self, mempool: bool, operand: int, expected: Optional[int], softfor height=softfork_height, ) - # with the 2.0 hard fork, division with negative numbers is allowed - if operand < 0 and softfork_height >= test_constants.HARD_FORK_HEIGHT: - expected = None - - assert npc_result.error == expected + # after the 2.0 hard fork, division with negative numbers is allowed + assert npc_result.error is None def test_invalid_condition_list_terminator(self, softfork_height: uint32) -> None: # note how the list of conditions isn't correctly terminated with a @@ -2365,17 +2362,7 @@ def test_agg_sig_cost(self, condition: ConditionOpcode, softfork_height: uint32) else: generator_base_cost = 20512 - if softfork_height < test_constants.HARD_FORK_HEIGHT and condition in [ - ConditionOpcode.AGG_SIG_PARENT, - ConditionOpcode.AGG_SIG_PUZZLE, - ConditionOpcode.AGG_SIG_AMOUNT, - ConditionOpcode.AGG_SIG_PUZZLE_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_PUZZLE, - ]: - expected_cost = 0 - else: - expected_cost = ConditionCost.AGG_SIG.value + expected_cost = ConditionCost.AGG_SIG.value # this max cost is exactly enough for the AGG_SIG condition npc_result = generator_condition_tester( @@ -2420,41 +2407,12 @@ def test_agg_sig_extra_arg( ) -> None: pubkey = "0x" + bytes(G1Element.generator()).hex() - new_condition = condition in [ - ConditionOpcode.AGG_SIG_PARENT, - 
ConditionOpcode.AGG_SIG_PUZZLE, - ConditionOpcode.AGG_SIG_AMOUNT, - ConditionOpcode.AGG_SIG_PUZZLE_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_AMOUNT, - ConditionOpcode.AGG_SIG_PARENT_PUZZLE, - ] - - hard_fork_activated = softfork_height >= test_constants.HARD_FORK_HEIGHT - - expected_error = None - # in mempool mode, we don't allow extra arguments if mempool and extra_arg != "": expected_error = Err.INVALID_CONDITION.value - - # the original AGG_SIG_* conditions had a quirk (fixed in the hard fork) - # where they always required exactly two arguments, regardless of - # mempool or not. After the hard fork, they behave like all other - # conditions - if not new_condition and not hard_fork_activated and extra_arg != "": - expected_error = Err.INVALID_CONDITION.value - - # except before the hard fork has activated, new conditions are just - # unknown - if new_condition and not hard_fork_activated: + else: expected_error = None - # before the hard fork activates, the new conditions are unknown and - # fail in mempool mode, regardless of whether they have extra arguments - # or not - if new_condition and not hard_fork_activated and mempool: - expected_error = Err.INVALID_CONDITION.value - # this max cost is exactly enough for the AGG_SIG condition npc_result = generator_condition_tester( f'({condition[0]} {pubkey} "foobar"{extra_arg}) ', @@ -2567,10 +2525,6 @@ def test_softfork_condition( # in mempool all unknown conditions are always a failure if mempool: expect_error = Err.INVALID_CONDITION.value - # the SOFTFORK condition is only activated with the hard fork, so - # before then there are no errors - elif softfork_height < test_constants.HARD_FORK_HEIGHT: - expect_error = None assert npc_result.error == expect_error diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py index 83f6c34af7da..07ee94df6340 100644 --- a/chia/full_node/mempool_check_conditions.py +++ b/chia/full_node/mempool_check_conditions.py @@ -40,7 +40,7 @@ def get_flags_for_height_and_constants(height: int, constants: ConsensusConstants) -> int: - flags = 0 + flags = ENABLE_SOFTFORK_CONDITION | ENABLE_BLS_OPS_OUTSIDE_GUARD | ENABLE_FIXED_DIV | AGG_SIG_ARGS | ALLOW_BACKREFS if height >= constants.SOFT_FORK4_HEIGHT: flags = flags | ENABLE_MESSAGE_CONDITIONS @@ -48,30 +48,6 @@ def get_flags_for_height_and_constants(height: int, constants: ConsensusConstant if height >= constants.SOFT_FORK5_HEIGHT: flags = flags | DISALLOW_INFINITY_G1 - if height >= constants.HARD_FORK_HEIGHT: - # the hard-fork initiated with 2.1. To activate June 2024 - # * costs are ascribed to some unknown condition codes, to allow for - # soft-forking in new conditions with cost - # * a new condition, SOFTFORK, is added which takes a first parameter to - # specify its cost. This allows soft-forks similar to the softfork - # operator - # * BLS operators introduced in the soft-fork (behind the softfork - # guard) are made available outside of the guard. 
- # * division with negative numbers are allowed, and round toward - # negative infinity - # * AGG_SIG_* conditions are allowed to have unknown additional - # arguments - # * Allow the block generator to be serialized with the improved clvm - # serialization format (with back-references) - flags = ( - flags - | ENABLE_SOFTFORK_CONDITION - | ENABLE_BLS_OPS_OUTSIDE_GUARD - | ENABLE_FIXED_DIV - | AGG_SIG_ARGS - | ALLOW_BACKREFS - ) - return flags @@ -83,7 +59,6 @@ def get_name_puzzle_conditions( height: uint32, constants: ConsensusConstants, ) -> NPCResult: - run_block = run_block_generator flags = get_flags_for_height_and_constants(height, constants) if mempool_mode: @@ -91,6 +66,8 @@ def get_name_puzzle_conditions( if height >= constants.HARD_FORK_HEIGHT: run_block = run_block_generator2 + else: + run_block = run_block_generator try: block_args = [bytes(gen) for gen in generator.generator_refs] diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py index bff1b039ebd8..4c2201c78a01 100644 --- a/chia/simulator/block_tools.py +++ b/chia/simulator/block_tools.py @@ -1911,22 +1911,22 @@ def compute_cost_table() -> List[int]: CONDITION_COSTS = compute_cost_table() -def conditions_cost(conds: Program, hard_fork: bool) -> uint64: +def conditions_cost(conds: Program) -> uint64: condition_cost = 0 for cond in conds.as_iter(): condition = cond.first().as_atom() - if condition in [ConditionOpcode.AGG_SIG_UNSAFE, ConditionOpcode.AGG_SIG_ME]: - condition_cost += ConditionCost.AGG_SIG.value - elif condition == ConditionOpcode.CREATE_COIN: + if condition == ConditionOpcode.CREATE_COIN: condition_cost += ConditionCost.CREATE_COIN.value # after the 2.0 hard fork, two byte conditions (with no leading 0) # have costs. Account for that. - elif hard_fork and len(condition) == 2 and condition[0] != 0: + elif len(condition) == 2 and condition[0] != 0: condition_cost += CONDITION_COSTS[condition[1]] - elif hard_fork and condition == ConditionOpcode.SOFTFORK.value: + elif condition == ConditionOpcode.SOFTFORK.value: arg = cond.rest().first().as_int() condition_cost += arg * 10000 - elif hard_fork and condition in [ + elif condition in [ + ConditionOpcode.AGG_SIG_UNSAFE, + ConditionOpcode.AGG_SIG_ME, ConditionOpcode.AGG_SIG_PARENT, ConditionOpcode.AGG_SIG_PUZZLE, ConditionOpcode.AGG_SIG_AMOUNT, @@ -1974,7 +1974,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants, cost, result = puzzle._run(INFINITE_COST, MEMPOOL_MODE, solution) clvm_cost += cost - condition_cost += conditions_cost(result, height >= constants.HARD_FORK_HEIGHT) + condition_cost += conditions_cost(result) else: block_program_args = SerializedProgram.to([[bytes(g) for g in generator.generator_refs]]) @@ -1984,7 +1984,7 @@ def compute_cost_test(generator: BlockGenerator, constants: ConsensusConstants, # each condition item is: # (parent-coin-id puzzle-hash amount conditions) conditions = res.at("rrrf") - condition_cost += conditions_cost(conditions, height >= constants.HARD_FORK_HEIGHT) + condition_cost += conditions_cost(conditions) size_cost = len(bytes(generator.program)) * constants.COST_PER_BYTE From 6dceabe2d31bf0955246b3668cf20b5ead9306fb Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Mon, 8 Jul 2024 18:44:49 +0200 Subject: [PATCH 43/77] [CHIA-691] simplify `MempoolItem` (#18143) make MempoolItem and InternalMempoolItem contain the SpendBundleConditions directly, rather than the NPCResult. A mempool item is guaranteed to be valid, so the error state of NPCResult will never be engaged anyway. 
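A minimal sketch of the shape of this change (stub classes only, not the real chia_rs or chia types): once the item stores the already-validated conditions directly, call sites lose the optional-unwrapping around `NPCResult.conds`.

from dataclasses import dataclass
from typing import List, Optional


@dataclass(frozen=True)
class SpendBundleConditions:  # stand-in for the chia_rs type
    spends: List[bytes]
    cost: int


@dataclass(frozen=True)
class NPCResult:  # stand-in: a validation result that may carry an error
    error: Optional[int]
    conds: Optional[SpendBundleConditions]


@dataclass(frozen=True)
class OldMempoolItem:
    npc_result: NPCResult


@dataclass(frozen=True)
class NewMempoolItem:
    conds: SpendBundleConditions  # items admitted to the pool are always valid


def cost_old(item: OldMempoolItem) -> int:
    assert item.npc_result.conds is not None  # every caller had to re-prove validity
    return item.npc_result.conds.cost


def cost_new(item: NewMempoolItem) -> int:
    return item.conds.cost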
--- benchmarks/mempool-long-lived.py | 6 +-- chia/_tests/core/mempool/test_mempool.py | 2 +- .../core/mempool/test_mempool_item_queries.py | 5 +- .../core/mempool/test_mempool_manager.py | 47 +++++++++---------- .../mempool/test_singleton_fast_forward.py | 12 ++--- .../test_fee_estimation_integration.py | 3 +- chia/clvm/spend_sim.py | 6 +-- chia/full_node/full_node.py | 6 +-- chia/full_node/mempool.py | 16 +++---- chia/full_node/mempool_manager.py | 43 ++++++++--------- chia/rpc/full_node_rpc_api.py | 8 ++-- chia/types/eligible_coin_spends.py | 6 +-- chia/types/internal_mempool_item.py | 4 +- chia/types/mempool_item.py | 14 +++--- 14 files changed, 80 insertions(+), 98 deletions(-) diff --git a/benchmarks/mempool-long-lived.py b/benchmarks/mempool-long-lived.py index 3a1275e15dd0..94bffdec4ff7 100644 --- a/benchmarks/mempool-long-lived.py +++ b/benchmarks/mempool-long-lived.py @@ -114,9 +114,9 @@ async def get_coin_record(coin_ids: Collection[bytes32]) -> List[CoinRecord]: coin.name(): CoinRecord(coin, uint32(height // 2), uint32(0), False, uint64(timestamp // 2)) } spend_bundle_id = sb.name() - npc = await mempool.pre_validate_spendbundle(sb, None, spend_bundle_id) - assert npc is not None - await mempool.add_spend_bundle(sb, npc, spend_bundle_id, uint32(height)) + sbc = await mempool.pre_validate_spendbundle(sb, None, spend_bundle_id) + assert sbc is not None + await mempool.add_spend_bundle(sb, sbc, spend_bundle_id, uint32(height)) if height % 100 == 0: print( diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 124f5336e238..bdf917bf0fce 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -107,7 +107,7 @@ def make_item( return MempoolItem( SpendBundle([], G2Element()), fee, - NPCResult(None, SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0)), + SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0), spend_bundle_name, uint32(0), assert_height, diff --git a/chia/_tests/core/mempool/test_mempool_item_queries.py b/chia/_tests/core/mempool/test_mempool_item_queries.py index bd41b3f8006f..898c9453bdbe 100644 --- a/chia/_tests/core/mempool/test_mempool_item_queries.py +++ b/chia/_tests/core/mempool/test_mempool_item_queries.py @@ -49,11 +49,12 @@ def make_item(coin_spends: List[CoinSpend]) -> MempoolItem: npc_result = get_name_puzzle_conditions( generator=generator, max_cost=INFINITE_COST, mempool_mode=True, height=uint32(0), constants=DEFAULT_CONSTANTS ) - bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result) + assert npc_result.conds is not None + bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result.conds) return MempoolItem( spend_bundle=spend_bundle, fee=fee, - npc_result=npc_result, + conds=npc_result.conds, spend_bundle_name=spend_bundle.name(), height_added_to_mempool=TEST_HEIGHT, bundle_coin_spends=bundle_coin_spends, diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index df5f10754ed8..5e16c69e50a2 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -11,7 +11,6 @@ from chia._tests.util.misc import invariant_check_mempool from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia.consensus.constants import ConsensusConstants -from chia.consensus.cost_calculator import NPCResult from chia.consensus.default_constants import DEFAULT_CONSTANTS from 
chia.full_node.bundle_tools import simple_solution_generator from chia.full_node.mempool import MAX_SKIPPED_ITEMS, PRIORITY_TX_THRESHOLD @@ -385,8 +384,8 @@ def spend_bundle_from_conditions( async def add_spendbundle( mempool_manager: MempoolManager, sb: SpendBundle, sb_name: bytes32 ) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]: - npc_result = await mempool_manager.pre_validate_spendbundle(sb, None, sb_name) - ret = await mempool_manager.add_spend_bundle(sb, npc_result, sb_name, TEST_HEIGHT) + sbc = await mempool_manager.pre_validate_spendbundle(sb, None, sb_name) + ret = await mempool_manager.add_spend_bundle(sb, sbc, sb_name, TEST_HEIGHT) invariant_check_mempool(mempool_manager.mempool) return ret.cost, ret.status, ret.error @@ -404,14 +403,13 @@ async def generate_and_add_spendbundle( def make_bundle_spends_map_and_fee( - spend_bundle: SpendBundle, npc_result: NPCResult + spend_bundle: SpendBundle, conds: SpendBundleConditions ) -> Tuple[Dict[bytes32, BundleCoinSpend], uint64]: bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = {} eligibility_and_additions: Dict[bytes32, EligibilityAndAdditions] = {} removals_amount = 0 additions_amount = 0 - assert npc_result.conds is not None - for spend in npc_result.conds.spends: + for spend in conds.spends: coin_id = bytes32(spend.coin_id) spend_additions = [] for puzzle_hash, amount, _ in spend.create_coin: @@ -443,11 +441,12 @@ def mempool_item_from_spendbundle(spend_bundle: SpendBundle) -> MempoolItem: npc_result = get_name_puzzle_conditions( generator=generator, max_cost=INFINITE_COST, mempool_mode=True, height=uint32(0), constants=DEFAULT_CONSTANTS ) - bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result) + assert npc_result.conds is not None + bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result.conds) return MempoolItem( spend_bundle=spend_bundle, fee=fee, - npc_result=npc_result, + conds=npc_result.conds, spend_bundle_name=spend_bundle.name(), height_added_to_mempool=TEST_HEIGHT, bundle_coin_spends=bundle_coin_spends, @@ -478,8 +477,8 @@ async def test_valid_addition_amount() -> None: conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, max_amount]] coin = Coin(IDENTITY_PUZZLE_HASH, IDENTITY_PUZZLE_HASH, max_amount) sb = spend_bundle_from_conditions(conditions, coin) - npc_result = await mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) - assert npc_result.error is None + # ensure this does not throw + _ = await mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) @pytest.mark.anyio @@ -530,8 +529,7 @@ async def test_minting_coin() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, TEST_COIN_AMOUNT]] sb = spend_bundle_from_conditions(conditions) - npc_result = await mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) - assert npc_result.error is None + _ = await mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, TEST_COIN_AMOUNT + 1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="MINTING_COIN"): @@ -543,8 +541,7 @@ async def test_reserve_fee_condition() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) conditions = [[ConditionOpcode.RESERVE_FEE, TEST_COIN_AMOUNT]] sb = spend_bundle_from_conditions(conditions) - npc_result = await 
mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) - assert npc_result.error is None + _ = await mempool_manager.pre_validate_spendbundle(sb, None, sb.name()) conditions = [[ConditionOpcode.RESERVE_FEE, TEST_COIN_AMOUNT + 1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="RESERVE_FEE_CONDITION_FAILED"): @@ -750,11 +747,11 @@ def mk_item( coin_spend=spend, eligible_for_dedup=False, eligible_for_fast_forward=False, additions=[] ) spend_bundle = SpendBundle(coin_spends, G2Element()) - npc_result = NPCResult(None, make_test_conds(cost=cost, spend_ids=spend_ids)) + conds = make_test_conds(cost=cost, spend_ids=spend_ids) return MempoolItem( spend_bundle=spend_bundle, fee=uint64(fee), - npc_result=npc_result, + conds=conds, spend_bundle_name=spend_bundle.name(), height_added_to_mempool=uint32(0), assert_height=None if assert_height is None else uint32(assert_height), @@ -1388,10 +1385,9 @@ def test_dedup_info_nothing_to_do() -> None: ] sb = spend_bundle_from_conditions(conditions, TEST_COIN, sig) mempool_item = mempool_item_from_spendbundle(sb) - assert mempool_item.npc_result.conds is not None eligible_coin_spends = EligibleCoinSpends() unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info( - bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.conds.cost + bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.conds.cost ) assert unique_coin_spends == sb.coin_spends assert cost_saving == 0 @@ -1407,11 +1403,11 @@ def test_dedup_info_eligible_1st_time() -> None: ] sb = spend_bundle_from_conditions(conditions, TEST_COIN) mempool_item = mempool_item_from_spendbundle(sb) - assert mempool_item.npc_result.conds is not None + assert mempool_item.conds is not None eligible_coin_spends = EligibleCoinSpends() solution = SerializedProgram.to(conditions) unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info( - bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.conds.cost + bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.conds.cost ) assert unique_coin_spends == sb.coin_spends assert cost_saving == 0 @@ -1433,10 +1429,9 @@ def test_dedup_info_eligible_but_different_solution() -> None: conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2]] sb = spend_bundle_from_conditions(conditions, TEST_COIN) mempool_item = mempool_item_from_spendbundle(sb) - assert mempool_item.npc_result.conds is not None with pytest.raises(ValueError, match="Solution is different from what we're deduplicating on"): eligible_coin_spends.get_deduplication_info( - bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.conds.cost + bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.conds.cost ) @@ -1454,9 +1449,9 @@ def test_dedup_info_eligible_2nd_time_and_another_1st_time() -> None: sb2 = spend_bundle_from_conditions(second_conditions, TEST_COIN2) sb = SpendBundle.aggregate([sb1, sb2]) mempool_item = mempool_item_from_spendbundle(sb) - assert mempool_item.npc_result.conds is not None + assert mempool_item.conds is not None unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info( - bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.conds.cost + bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.conds.cost ) # Only the eligible one 
that we encountered more than once gets deduplicated assert unique_coin_spends == sb2.coin_spends @@ -1502,9 +1497,9 @@ def test_dedup_info_eligible_3rd_time_another_2nd_time_and_one_non_eligible() -> sb3 = spend_bundle_from_conditions(sb3_conditions, TEST_COIN3, sig) sb = SpendBundle.aggregate([sb1, sb2, sb3]) mempool_item = mempool_item_from_spendbundle(sb) - assert mempool_item.npc_result.conds is not None + assert mempool_item.conds is not None unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info( - bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.conds.cost + bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.conds.cost ) assert unique_coin_spends == sb3.coin_spends saved_cost2 = uint64(1800044) diff --git a/chia/_tests/core/mempool/test_singleton_fast_forward.py b/chia/_tests/core/mempool/test_singleton_fast_forward.py index b1fa8a300fc3..8fc4eae2691e 100644 --- a/chia/_tests/core/mempool/test_singleton_fast_forward.py +++ b/chia/_tests/core/mempool/test_singleton_fast_forward.py @@ -54,9 +54,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[Unspe item = mempool_item_from_spendbundle(sb) # This coin is not eligible for fast forward assert item.bundle_coin_spends[TEST_COIN_ID].eligible_for_fast_forward is False - internal_mempool_item = InternalMempoolItem( - sb, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends - ) + internal_mempool_item = InternalMempoolItem(sb, item.conds, item.height_added_to_mempool, item.bundle_coin_spends) original_version = dataclasses.replace(internal_mempool_item) eligible_coin_spends = EligibleCoinSpends() await eligible_coin_spends.process_fast_forward_spends( @@ -87,9 +85,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(puzzle_hash: bytes32) -> Opti item = mempool_item_from_spendbundle(sb) # The coin is eligible for fast forward assert item.bundle_coin_spends[test_coin.name()].eligible_for_fast_forward is True - internal_mempool_item = InternalMempoolItem( - sb, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends - ) + internal_mempool_item = InternalMempoolItem(sb, item.conds, item.height_added_to_mempool, item.bundle_coin_spends) eligible_coin_spends = EligibleCoinSpends() # We have no fast forward records yet, so we'll process this coin for the # first time here, but the DB lookup will return None @@ -131,9 +127,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(puzzle_hash: bytes32) -> Opti sb = spend_bundle_from_conditions(conditions, test_coin) item = mempool_item_from_spendbundle(sb) assert item.bundle_coin_spends[test_coin.name()].eligible_for_fast_forward is True - internal_mempool_item = InternalMempoolItem( - sb, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends - ) + internal_mempool_item = InternalMempoolItem(sb, item.conds, item.height_added_to_mempool, item.bundle_coin_spends) original_version = dataclasses.replace(internal_mempool_item) eligible_coin_spends = EligibleCoinSpends() await eligible_coin_spends.process_fast_forward_spends( diff --git a/chia/_tests/fee_estimation/test_fee_estimation_integration.py b/chia/_tests/fee_estimation/test_fee_estimation_integration.py index 3edd6054943e..6375b85ee8f0 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_integration.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_integration.py @@ -11,7 +11,6 @@ instantiate_mempool_manager, zero_calls_get_coin_records, ) -from 
chia.consensus.cost_calculator import NPCResult from chia.full_node.bitcoin_fee_estimator import create_bitcoin_fee_estimator from chia.full_node.fee_estimation import ( EmptyFeeMempoolInfo, @@ -47,7 +46,7 @@ def make_mempoolitem() -> MempoolItem: mempool_item = MempoolItem( spend_bundle, fee, - NPCResult(None, conds), + conds, spend_bundle.name(), uint32(block_height), ) diff --git a/chia/clvm/spend_sim.py b/chia/clvm/spend_sim.py index eba6e99d5804..407a4b7335d2 100644 --- a/chia/clvm/spend_sim.py +++ b/chia/clvm/spend_sim.py @@ -337,14 +337,12 @@ def __init__(self, service: SpendSim) -> None: async def push_tx(self, spend_bundle: SpendBundle) -> Tuple[MempoolInclusionStatus, Optional[Err]]: try: spend_bundle_id = spend_bundle.name() - cost_result: NPCResult = await self.service.mempool_manager.pre_validate_spendbundle( - spend_bundle, None, spend_bundle_id - ) + sbc = await self.service.mempool_manager.pre_validate_spendbundle(spend_bundle, None, spend_bundle_id) except ValidationError as e: return MempoolInclusionStatus.FAILED, e.code assert self.service.mempool_manager.peak is not None info = await self.service.mempool_manager.add_spend_bundle( - spend_bundle, cost_result, spend_bundle_id, self.service.mempool_manager.peak.height + spend_bundle, sbc, spend_bundle_id, self.service.mempool_manager.peak.height ) return info.status, info.error diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 88a3902b596f..33028a86c65d 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2350,6 +2350,7 @@ async def add_transaction( except Exception: self.mempool_manager.remove_seen(spend_name) raise + async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.low): if self.mempool_manager.get_spendbundle(spend_name) is not None: self.mempool_manager.remove_seen(spend_name) @@ -2400,8 +2401,7 @@ async def broadcast_added_tx( else: await self.server.send_to_all([msg], NodeType.FULL_NODE, current_peer.peer_node_id) - conds = mempool_item.npc_result.conds - assert conds is not None + conds = mempool_item.conds all_peers = { peer_id @@ -2457,7 +2457,7 @@ async def broadcast_removed_tx(self, mempool_removals: List[MempoolRemoveInfo]) for removal_info in mempool_removals: for internal_mempool_item in removal_info.items: - conds = internal_mempool_item.npc_result.conds + conds = internal_mempool_item.conds assert conds is not None hints_for_removals = await self.hint_store.get_hints([bytes32(spend.coin_id) for spend in conds.spends]) diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py index edf0b3771891..41388b2c691b 100644 --- a/chia/full_node/mempool.py +++ b/chia/full_node/mempool.py @@ -142,7 +142,7 @@ def _row_to_item(self, row: sqlite3.Row) -> MempoolItem: return MempoolItem( item.spend_bundle, uint64(fee), - item.npc_result, + item.conds, name, uint32(item.height_added_to_mempool), assert_height, @@ -177,7 +177,7 @@ def items_with_coin_ids(self, coin_ids: Set[bytes32]) -> List[bytes32]: transaction_ids: List[bytes32] = [] for transaction_id, item in self._items.items(): - conds = item.npc_result.conds + conds = item.conds assert conds is not None for spend in conds.spends: @@ -206,7 +206,7 @@ def items_with_puzzle_hashes(self, puzzle_hashes: Set[bytes32], include_hints: b transaction_ids: List[bytes32] = [] for transaction_id, item in self._items.items(): - conds = item.npc_result.conds + conds = item.conds assert conds is not None for spend in conds.spends: @@ -374,7 +374,7 @@ def add_to_pool(self, item: 
MempoolItem) -> MempoolAddInfo: """ assert item.fee < MEMPOOL_ITEM_FEE_LIMIT - assert item.npc_result.conds is not None + assert item.conds is not None assert item.cost <= self.mempool_info.max_block_clvm_cost removals: List[MempoolRemoveInfo] = [] @@ -452,11 +452,11 @@ def add_to_pool(self, item: MempoolItem) -> MempoolAddInfo: item.fee / item.cost, ), ) - all_coin_spends = [(s.coin_id, item.name) for s in item.npc_result.conds.spends] + all_coin_spends = [(s.coin_id, item.name) for s in item.conds.spends] conn.executemany("INSERT INTO spends VALUES(?, ?)", all_coin_spends) self._items[item.name] = InternalMempoolItem( - item.spend_bundle, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends + item.spend_bundle, item.conds, item.height_added_to_mempool, item.bundle_coin_spends ) self._total_cost += item.cost self._total_fee += item.fee @@ -505,8 +505,8 @@ async def create_bundle_from_mempool_items( if not item_inclusion_filter(name): continue try: - assert item.npc_result.conds is not None - cost = item.npc_result.conds.cost + assert item.conds is not None + cost = item.conds.cost if skipped_items >= PRIORITY_TX_THRESHOLD: # If we've encountered `PRIORITY_TX_THRESHOLD` number of # transactions that don't fit in the remaining block size, diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 7a7737ee94fd..77820bbe36e5 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -163,7 +163,7 @@ class NewPeakInfo: class NewPeakItem: transaction_id: bytes32 spend_bundle: SpendBundle - npc_result: NPCResult + conds: SpendBundleConditions class MempoolManager: @@ -309,7 +309,7 @@ async def pre_validate_spendbundle( new_spend_bytes: Optional[bytes], spend_name: bytes32, bls_cache: Optional[BLSCache] = None, - ) -> NPCResult: + ) -> SpendBundleConditions: """ Errors are included within the cached_result. 
This runs in another process so we don't block the main thread @@ -346,12 +346,15 @@ async def pre_validate_spendbundle( f"pre_validate_spendbundle took {duration:0.4f} seconds " f"for {spend_name} (queue-size: {self._worker_queue_size})", ) - return ret + if ret.error is not None: + raise ValidationError(Err(ret.error), "pre_validate_spendbundle failed") + assert ret.conds is not None + return ret.conds async def add_spend_bundle( self, new_spend: SpendBundle, - npc_result: NPCResult, + conds: SpendBundleConditions, spend_name: bytes32, first_added_height: uint32, get_coin_records: Optional[Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]]] = None, @@ -364,7 +367,7 @@ async def add_spend_bundle( Args: new_spend: spend bundle to validate and add - npc_result: result of running the clvm transaction in a fake block + conds: result of running the clvm transaction in a fake block spend_name: hash of the spend bundle data, passed in as an optimization Returns: @@ -383,7 +386,7 @@ async def add_spend_bundle( get_coin_records = self.get_coin_records err, item, remove_items = await self.validate_spend_bundle( new_spend, - npc_result, + conds, spend_name, first_added_height, get_coin_records, @@ -413,7 +416,7 @@ async def add_spend_bundle( async def validate_spend_bundle( self, new_spend: SpendBundle, - npc_result: NPCResult, + conds: SpendBundleConditions, spend_name: bytes32, first_added_height: uint32, get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]], @@ -424,7 +427,7 @@ async def validate_spend_bundle( Args: new_spend: spend bundle to validate - npc_result: result of running the clvm transaction in a fake block + conds: result of running the clvm transaction in a fake block spend_name: hash of the spend bundle data, passed in as an optimization first_added_height: The block height that `new_spend` first entered this node's mempool. Used to estimate how long a spend has taken to be included on the chain. 
@@ -439,21 +442,15 @@ async def validate_spend_bundle( if self.peak is None: return Err.MEMPOOL_NOT_INITIALIZED, None, [] - assert npc_result.error is None - if npc_result.error is not None: - return Err(npc_result.error), None, [] + cost = conds.cost - cost = uint64(0 if npc_result.conds is None else npc_result.conds.cost) - log.debug(f"Cost: {cost}") - - assert npc_result.conds is not None removal_names: Set[bytes32] = set() additions_dict: Dict[bytes32, Coin] = {} addition_amount: int = 0 # Map of coin ID to eligibility information eligibility_and_additions: Dict[bytes32, EligibilityAndAdditions] = {} non_eligible_coin_ids: List[bytes32] = [] - for spend in npc_result.conds.spends: + for spend in conds.spends: coin_id = bytes32(spend.coin_id) removal_names.add(coin_id) spend_additions = [] @@ -561,7 +558,7 @@ async def validate_spend_bundle( return fail_reason, None, [] # Verify conditions, create hash_key list for aggsig check - for spend in npc_result.conds.spends: + for spend in conds.spends: coin_record: CoinRecord = removal_record_dict[bytes32(spend.coin_id)] # Check that the revealed removal puzzles actually match the puzzle hash if spend.puzzle_hash != coin_record.coin.puzzle_hash: @@ -577,12 +574,12 @@ async def validate_spend_bundle( assert self.peak.timestamp is not None tl_error: Optional[Err] = mempool_check_time_locks( removal_record_dict, - npc_result.conds, + conds, self.peak.height, self.peak.timestamp, ) - timelocks: TimelockConditions = compute_assert_height(removal_record_dict, npc_result.conds) + timelocks: TimelockConditions = compute_assert_height(removal_record_dict, conds) if timelocks.assert_before_height is not None and timelocks.assert_before_height <= timelocks.assert_height: # returning None as the "potential" means it failed. 
We won't store it @@ -594,7 +591,7 @@ async def validate_spend_bundle( potential = MempoolItem( new_spend, uint64(fees), - npc_result, + conds, spend_name, first_added_height, timelocks.assert_height, @@ -753,7 +750,7 @@ async def local_get_coin_records(names: Collection[bytes32]) -> List[CoinRecord] for item in old_pool.all_items(): info = await self.add_spend_bundle( item.spend_bundle, - item.npc_result, + item.conds, item.spend_bundle_name, item.height_added_to_mempool, local_get_coin_records, @@ -774,13 +771,13 @@ async def local_get_coin_records(names: Collection[bytes32]) -> List[CoinRecord] for item in potential_txs.values(): info = await self.add_spend_bundle( item.spend_bundle, - item.npc_result, + item.conds, item.spend_bundle_name, item.height_added_to_mempool, self.get_coin_records, ) if info.status == MempoolInclusionStatus.SUCCESS: - txs_added.append(NewPeakItem(item.spend_bundle_name, item.spend_bundle, item.npc_result)) + txs_added.append(NewPeakItem(item.spend_bundle_name, item.spend_bundle, item.conds)) mempool_item_removals.extend(info.removals) log.info( f"Size of mempool: {self.mempool.size()} spends, " diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py index c0f00c567278..930ca48ebfc8 100644 --- a/chia/rpc/full_node_rpc_api.py +++ b/chia/rpc/full_node_rpc_api.py @@ -5,7 +5,6 @@ from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain import Blockchain, BlockchainMutexPriority -from chia.consensus.cost_calculator import NPCResult from chia.consensus.pos_quality import UI_ACTUAL_SPACE_CONSTANT_FACTOR from chia.full_node.fee_estimator_interface import FeeEstimatorInterface from chia.full_node.full_node import FullNode @@ -24,6 +23,7 @@ from chia.types.generator_types import BlockGenerator from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.types.spend_bundle import SpendBundle +from chia.types.spend_bundle_conditions import SpendBundleConditions from chia.types.unfinished_header_block import UnfinishedHeaderBlock from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint32, uint64, uint128 @@ -867,12 +867,10 @@ async def _validate_fee_estimate_cost(self, request: Dict[str, Any]) -> uint64: if "spend_bundle" in request: spend_bundle: SpendBundle = SpendBundle.from_json_dict(request["spend_bundle"]) spend_name = spend_bundle.name() - npc_result: NPCResult = await self.service.mempool_manager.pre_validate_spendbundle( + conds: SpendBundleConditions = await self.service.mempool_manager.pre_validate_spendbundle( spend_bundle, None, spend_name ) - if npc_result.error is not None: - raise RuntimeError(f"Spend Bundle failed validation: {npc_result.error}") - cost = uint64(0 if npc_result.conds is None else npc_result.conds.cost) + cost = conds.cost elif "cost" in request: cost = request["cost"] else: diff --git a/chia/types/eligible_coin_spends.py b/chia/types/eligible_coin_spends.py index 7a450bd67376..5e799cd81d8f 100644 --- a/chia/types/eligible_coin_spends.py +++ b/chia/types/eligible_coin_spends.py @@ -333,16 +333,16 @@ async def process_fast_forward_spends( ) # We need to run the new spend bundle to make sure it remains valid generator = simple_solution_generator(new_sb) - assert mempool_item.npc_result.conds is not None new_npc_result = get_name_puzzle_conditions( generator=generator, - max_cost=mempool_item.npc_result.conds.cost, + max_cost=mempool_item.conds.cost, mempool_mode=True, height=height, constants=constants, ) if new_npc_result.error is not None: raise 
ValueError("Mempool item became invalid after singleton fast forward.") + assert new_npc_result.conds is not None # Update bundle_coin_spends using the collected data for coin_id in replaced_coin_ids: mempool_item.bundle_coin_spends.pop(coin_id, None) @@ -354,4 +354,4 @@ async def process_fast_forward_spends( # change. Still, it's good form to update the spend bundle with the # new coin spends mempool_item.spend_bundle = new_sb - mempool_item.npc_result = new_npc_result + mempool_item.conds = new_npc_result.conds diff --git a/chia/types/internal_mempool_item.py b/chia/types/internal_mempool_item.py index ebab9c12ccc1..922bc8aefe41 100644 --- a/chia/types/internal_mempool_item.py +++ b/chia/types/internal_mempool_item.py @@ -3,17 +3,17 @@ from dataclasses import dataclass from typing import Dict -from chia.consensus.cost_calculator import NPCResult from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.mempool_item import BundleCoinSpend from chia.types.spend_bundle import SpendBundle +from chia.types.spend_bundle_conditions import SpendBundleConditions from chia.util.ints import uint32 @dataclass class InternalMempoolItem: spend_bundle: SpendBundle - npc_result: NPCResult + conds: SpendBundleConditions height_added_to_mempool: uint32 # Map of coin ID to coin spend data between the bundle and its NPCResult bundle_coin_spends: Dict[bytes32, BundleCoinSpend] diff --git a/chia/types/mempool_item.py b/chia/types/mempool_item.py index de2c41feda2e..5a369458e696 100644 --- a/chia/types/mempool_item.py +++ b/chia/types/mempool_item.py @@ -3,11 +3,11 @@ from dataclasses import dataclass, field from typing import Any, Dict, List, Optional -from chia.consensus.cost_calculator import NPCResult from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import CoinSpend from chia.types.spend_bundle import SpendBundle +from chia.types.spend_bundle_conditions import SpendBundleConditions from chia.util.ints import uint32, uint64 from chia.util.streamable import recurse_jsonify @@ -26,7 +26,7 @@ class BundleCoinSpend: class MempoolItem: spend_bundle: SpendBundle fee: uint64 - npc_result: NPCResult + conds: SpendBundleConditions spend_bundle_name: bytes32 height_added_to_mempool: uint32 @@ -38,7 +38,8 @@ class MempoolItem: assert_before_height: Optional[uint32] = None assert_before_seconds: Optional[uint64] = None - # Map of coin ID to coin spend data between the bundle and its NPCResult + # Map of coin ID to coin spend data between the bundle and its + # SpendBundleConditions bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = field(default_factory=dict) def __lt__(self, other: MempoolItem) -> bool: @@ -57,13 +58,12 @@ def name(self) -> bytes32: @property def cost(self) -> uint64: - return uint64(0 if self.npc_result.conds is None else self.npc_result.conds.cost) + return uint64(0 if self.conds is None else self.conds.cost) @property def additions(self) -> List[Coin]: - assert self.npc_result.conds is not None additions: List[Coin] = [] - for spend in self.npc_result.conds.spends: + for spend in self.conds.spends: for puzzle_hash, amount, _ in spend.create_coin: coin = Coin(spend.coin_id, puzzle_hash, uint64(amount)) additions.append(coin) @@ -77,7 +77,7 @@ def to_json_dict(self) -> Dict[str, Any]: return { "spend_bundle": recurse_jsonify(self.spend_bundle), "fee": recurse_jsonify(self.fee), - "npc_result": recurse_jsonify(self.npc_result), + "npc_result": {"Error": None, "conds": 
recurse_jsonify(self.conds)}, "cost": recurse_jsonify(self.cost), "spend_bundle_name": recurse_jsonify(self.spend_bundle_name), "additions": recurse_jsonify(self.additions), From 3cecc27dd26606cec517a0a9941a115c3cac0232 Mon Sep 17 00:00:00 2001 From: Almog De Paz Date: Mon, 8 Jul 2024 19:45:03 +0300 Subject: [PATCH 44/77] ad.fee_estimation_fixes (#18262) --- .../test_fee_estimation_unit_tests.py | 30 +++++++++---------- chia/full_node/fee_estimator.py | 2 +- chia/full_node/fee_tracker.py | 5 ++-- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py index c77d5b6307f3..e94b08f3a4c4 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py @@ -61,30 +61,30 @@ def test_steady_fee_pressure() -> None: estimator = create_bitcoin_fee_estimator(max_block_cost_clvm) cost = uint64(5000000) fee = uint64(10000000) + time_offset_seconds = 40 num_blocks_wait_in_mempool = 5 start = 100 end = 300 estimates_during = [] + start_from = 250 for height in range(start, end): height = uint32(height) items = make_block(height, 1, cost, fee, num_blocks_wait_in_mempool) estimator.new_block(FeeBlockInfo(uint32(height), items)) - estimates_during.append(estimator.estimate_fee_rate(time_offset_seconds=40 * height)) - - # est = estimator.estimate_fee_rate(time_offset_seconds=240) #TODO - e = [] - - for seconds in range(30, 5 * 60, 30): - est2 = estimator.estimate_fee_rate(time_offset_seconds=seconds) - e.append(est2) - - # assert est == FeeRate.create(Mojos(fee), CLVMCost(cost)) #TODO - estimates_after = [estimator.estimate_fee_rate(time_offset_seconds=40 * height) for height in range(start, end)] - block_estimates = [estimator.estimate_fee_rate_for_block(uint32(h)) for h in range(start, end)] - - assert estimates_during == estimates_after - assert estimates_after == block_estimates + if height >= start_from: + estimation = estimator.estimate_fee_rate(time_offset_seconds=time_offset_seconds * (height - start_from)) + estimates_during.append(estimation) + + estimates_after = [] + for height in range(start_from, end): + estimation = estimator.estimate_fee_rate(time_offset_seconds=time_offset_seconds * (height - start_from)) + estimates_after.append(estimation) + + block_estimates = [estimator.estimate_fee_rate_for_block(uint32(h + 1)) for h in range(0, 50)] + for idx, es_after in enumerate(estimates_after): + assert abs(es_after.mojos_per_clvm_cost - estimates_during[idx].mojos_per_clvm_cost) < 0.001 + assert es_after.mojos_per_clvm_cost == block_estimates[idx].mojos_per_clvm_cost def test_init_buckets() -> None: diff --git a/chia/full_node/fee_estimator.py b/chia/full_node/fee_estimator.py index ed5e2b096b31..552dd1623fbc 100644 --- a/chia/full_node/fee_estimator.py +++ b/chia/full_node/fee_estimator.py @@ -28,7 +28,7 @@ def parse(self, fee_result: EstimateResult) -> float: median = fee_result.median if median != -1: - return median + return median / 1000.0 if fail_bucket.start == 0: return -1.0 diff --git a/chia/full_node/fee_tracker.py b/chia/full_node/fee_tracker.py index f38ce2b6478b..5e43518def33 100644 --- a/chia/full_node/fee_tracker.py +++ b/chia/full_node/fee_tracker.py @@ -70,7 +70,7 @@ class FeeStat: # TxConfirmStats # Track historical moving average of this total over block tx_ct_avg: List[float] - # Count the total number of txs confirmed within Y blocks in each bucket + # Count the total 
number of txs confirmed within Y periods in each bucket # Track the historical moving average of these totals over blocks confirmed_average: List[List[float]] # confirmed_average [y][x] @@ -84,7 +84,7 @@ class FeeStat: # TxConfirmStats decay: float - # Resolution of blocks with which confirmations are tracked + # Resolution of blocks with which confirmations are tracked (number of blocks per period) scale: int # Mempool counts of outstanding transactions @@ -133,6 +133,7 @@ def tx_confirmed(self, blocks_to_confirm: int, item: MempoolItemInfo) -> None: if blocks_to_confirm < 1: raise ValueError("tx_confirmed called with < 1 block to confirm") + # convert from number of blocks to number of periods periods_to_confirm = int((blocks_to_confirm + self.scale - 1) / self.scale) fee_rate = item.fee_per_cost * 1000 From 956894dbd090ab9080f4cbc608d600f610d95b03 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Mon, 8 Jul 2024 18:47:28 +0200 Subject: [PATCH 45/77] use rust version of SpendBundle (#17430) --- .../core/custom_types/test_spend_bundle.py | 15 +---- chia/_tests/core/mempool/test_mempool.py | 2 +- .../fee_estimation/test_fee_estimation_rpc.py | 3 +- chia/_tests/wallet/cat_wallet/test_trades.py | 2 +- chia/data_layer/data_layer_wallet.py | 5 +- chia/types/spend_bundle.py | 63 +------------------ chia/wallet/trading/offer.py | 7 ++- 7 files changed, 13 insertions(+), 84 deletions(-) diff --git a/chia/_tests/core/custom_types/test_spend_bundle.py b/chia/_tests/core/custom_types/test_spend_bundle.py index 3821749d5317..3dc40111257c 100644 --- a/chia/_tests/core/custom_types/test_spend_bundle.py +++ b/chia/_tests/core/custom_types/test_spend_bundle.py @@ -13,7 +13,6 @@ from chia.types.coin_spend import CoinSpend, make_spend from chia.types.condition_opcodes import ConditionOpcode from chia.types.spend_bundle import SpendBundle -from chia.util.errors import ValidationError from chia.util.ints import uint64 BLANK_SPEND_BUNDLE = SpendBundle(coin_spends=[], aggregated_signature=G2Element()) @@ -29,18 +28,6 @@ def test_round_trip(self): assert sb == spend_bundle - def test_round_trip_with_legacy_key_parsing(self): - spend_bundle = BLANK_SPEND_BUNDLE - json_dict = spend_bundle.to_json_dict() - json_dict["coin_solutions"] = None - SpendBundle.from_json_dict(json_dict) # testing no error because parser just looks at "coin_spends" - json_dict["coin_solutions"] = json_dict["coin_spends"] - del json_dict["coin_spends"] - - sb = SpendBundle.from_json_dict(json_dict) - - assert sb == spend_bundle - def rand_hash(rng: random.Random) -> bytes32: ret = bytearray(32) @@ -80,5 +67,5 @@ def test_compute_additions_create_coin_max_cost() -> None: # make a large number of CoinSpends spends, _ = create_spends(6111) sb = SpendBundle(spends, G2Element()) - with pytest.raises(ValidationError, match="BLOCK_COST_EXCEEDS_MAX"): + with pytest.raises(ValueError, match="cost exceeded"): sb.additions() diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index bdf917bf0fce..000abfeb4496 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -699,7 +699,7 @@ async def test_invalid_signature( sb: SpendBundle = generate_test_spend_bundle(wallet_a, coin1) assert sb.aggregated_signature != G2Element.generator() - sb = dataclasses.replace(sb, aggregated_signature=G2Element.generator()) + sb = sb.replace(aggregated_signature=G2Element.generator()) res: Optional[Message] = await send_sb(full_node_1, sb) assert res is not None ack: 
TransactionAck = TransactionAck.from_bytes(res.data) diff --git a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py index 79d7b01af397..de95c8f2b986 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py @@ -17,7 +17,6 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.spend_bundle import SpendBundle from chia.util.ints import uint64 -from chia.util.streamable import InvalidTypeError @pytest.fixture(scope="function") @@ -157,7 +156,7 @@ async def test_cost_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, Fu @pytest.mark.anyio async def test_tx_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None: client, full_node_rpc_api = setup_node_and_rpc - with pytest.raises(InvalidTypeError): + with pytest.raises(TypeError): await full_node_rpc_api.get_fee_estimate({"target_times": [], "spend_bundle": {"coin_spends": 1}}) diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index d54c649c4f45..132ff086adc2 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -1917,7 +1917,7 @@ async def test_trade_bad_spend( assert trade_make is not None peer = wallet_node_taker.get_full_node_peer() offer = Offer.from_bytes(trade_make.offer) - bundle = dataclasses.replace(offer._bundle, aggregated_signature=G2Element()) + bundle = offer._bundle.replace(aggregated_signature=G2Element()) offer = dataclasses.replace(offer, _bundle=bundle) tr1, txs1 = await trade_manager_taker.respond_to_offer(offer, peer, DEFAULT_TX_CONFIG, fee=uint64(10)) txs1 = await trade_manager_taker.wallet_state_manager.add_pending_transactions(txs1, sign=False) diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py index d211df0fb0f2..0825e1e37024 100644 --- a/chia/data_layer/data_layer_wallet.py +++ b/chia/data_layer/data_layer_wallet.py @@ -562,7 +562,7 @@ async def create_update_state_spend( spend_bundle = SpendBundle([coin_spend], G2Element()) if announce_new_state: - spend_bundle = dataclasses.replace(spend_bundle, coin_spends=[coin_spend, second_coin_spend]) + spend_bundle = spend_bundle.replace(coin_spends=[coin_spend, second_coin_spend]) dl_tx = TransactionRecord( confirmed_at_height=uint32(0), @@ -1182,8 +1182,7 @@ async def make_update_offer( new_solution: Program = dl_solution.replace(rrffrf=new_graftroot, rrffrrf=Program.to([None] * 5)) new_spend: CoinSpend = dl_spend.replace(solution=SerializedProgram.from_program(new_solution)) - new_bundle: SpendBundle = dataclasses.replace( - txs[0].spend_bundle, + new_bundle: SpendBundle = txs[0].spend_bundle.replace( coin_spends=[*all_other_spends, new_spend], ) all_bundles.append(new_bundle) diff --git a/chia/types/spend_bundle.py b/chia/types/spend_bundle.py index 38cd20302925..8def49ab0e29 100644 --- a/chia/types/spend_bundle.py +++ b/chia/types/spend_bundle.py @@ -1,70 +1,13 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import Any, Dict, List - -from chia_rs import AugSchemeMPL, G2Element +import chia_rs from chia.consensus.default_constants import DEFAULT_CONSTANTS -from chia.types.blockchain_format.coin import Coin -from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.errors import Err, ValidationError -from chia.util.streamable import Streamable, streamable, streamable_from_dict -from 
chia.wallet.util.debug_spend_bundle import debug_spend_bundle - -from .coin_spend import CoinSpend, compute_additions_with_cost - - -@streamable -@dataclass(frozen=True) -class SpendBundle(Streamable): - """ - This is a list of coins being spent along with their solution programs, and a single - aggregated signature. This is the object that most closely corresponds to a bitcoin - transaction (although because of non-interactive signature aggregation, the boundaries - between transactions are more flexible than in bitcoin). - """ - - coin_spends: List[CoinSpend] - aggregated_signature: G2Element - - @classmethod - def aggregate(cls, spend_bundles: List[SpendBundle]) -> SpendBundle: - coin_spends: List[CoinSpend] = [] - sigs: List[G2Element] = [] - for bundle in spend_bundles: - coin_spends += bundle.coin_spends - sigs.append(bundle.aggregated_signature) - aggregated_signature = AugSchemeMPL.aggregate(sigs) - return cls(coin_spends, aggregated_signature) - - # TODO: this should be removed - def additions(self, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) -> List[Coin]: - items: List[Coin] = [] - for cs in self.coin_spends: - coins, cost = compute_additions_with_cost(cs, max_cost=max_cost) - max_cost -= cost - if max_cost < 0: - raise ValidationError(Err.BLOCK_COST_EXCEEDS_MAX, "additions() for SpendBundle") - items.extend(coins) - return items - - def removals(self) -> List[Coin]: - return [_.coin for _ in self.coin_spends] - - def name(self) -> bytes32: - return self.get_hash() - def debug(self, agg_sig_additional_data: bytes32 = DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -> None: - debug_spend_bundle(self, agg_sig_additional_data) +from .coin_spend import compute_additions_with_cost - @classmethod - def from_json_dict(cls, json_dict: Dict[str, Any]) -> SpendBundle: - if "coin_solutions" in json_dict and "coin_spends" not in json_dict: - json_dict = dict( - aggregated_signature=json_dict["aggregated_signature"], coin_spends=json_dict["coin_solutions"] - ) - return streamable_from_dict(cls, json_dict) +SpendBundle = chia_rs.SpendBundle # This function executes all the puzzles to compute the difference between diff --git a/chia/wallet/trading/offer.py b/chia/wallet/trading/offer.py index ef14792deb24..0c8c74f5948d 100644 --- a/chia/wallet/trading/offer.py +++ b/chia/wallet/trading/offer.py @@ -15,6 +15,7 @@ from chia.util.bech32m import bech32_decode, bech32_encode, convertbits from chia.util.errors import Err, ValidationError from chia.util.ints import uint64 +from chia.util.streamable import parse_rust from chia.wallet.conditions import ( AssertCoinAnnouncement, AssertPuzzleAnnouncement, @@ -683,12 +684,12 @@ def from_bech32(cls, offer_bech32: str) -> Offer: # We basically hijack the SpendBundle versions for most of it @classmethod def parse(cls, f: BinaryIO) -> Offer: - parsed_bundle = SpendBundle.parse(f) + parsed_bundle = parse_rust(f, SpendBundle) return cls.from_bytes(bytes(parsed_bundle)) def stream(self, f: BinaryIO) -> None: - as_spend_bundle = SpendBundle.from_bytes(bytes(self)) - as_spend_bundle.stream(f) + spend_bundle_bytes = self.to_spend_bundle().to_bytes() + f.write(spend_bundle_bytes) def __bytes__(self) -> bytes: return bytes(self.to_spend_bundle()) From c0d5b9b0972650a59f5428c8c35072d1dfebe315 Mon Sep 17 00:00:00 2001 From: Jack Nelson Date: Mon, 8 Jul 2024 16:17:13 -0400 Subject: [PATCH 46/77] CHIA-818 Add Soft Fork options to simulator config (#18247) add fork options to simulator CHIA-818 --- chia/cmds/sim_funcs.py | 26 +++++++++++++++++++------- 
chia/util/initial-config.yaml | 5 +++++ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/chia/cmds/sim_funcs.py b/chia/cmds/sim_funcs.py index a29279e4de9d..070dc3dce2ca 100644 --- a/chia/cmds/sim_funcs.py +++ b/chia/cmds/sim_funcs.py @@ -100,24 +100,36 @@ def create_chia_directory( else: config = load_config(chia_root, "config.yaml") # simulator overrides - config["simulator"]["key_fingerprint"] = fingerprint + sim_config = config["simulator"] + sim_config["key_fingerprint"] = fingerprint if farming_address is None: prefix = config["network_overrides"]["config"]["simulator0"]["address_prefix"] farming_address = encode_puzzle_hash(get_ph_from_fingerprint(fingerprint), prefix) - config["simulator"]["farming_address"] = farming_address + sim_config["farming_address"] = farming_address if plot_directory is not None: - config["simulator"]["plot_directory"] = plot_directory + sim_config["plot_directory"] = plot_directory # Temporary change to fix win / linux differences. - config["simulator"]["plot_directory"] = str(Path(config["simulator"]["plot_directory"])) - if "//" in config["simulator"]["plot_directory"] and os.name != "nt": + sim_config["plot_directory"] = str(Path(sim_config["plot_directory"])) + if "//" in sim_config["plot_directory"] and os.name != "nt": # if we're on linux, we need to convert to a linux path. - config["simulator"]["plot_directory"] = str(PureWindowsPath(config["simulator"]["plot_directory"]).as_posix()) - config["simulator"]["auto_farm"] = auto_farm if auto_farm is not None else True + sim_config["plot_directory"] = str(PureWindowsPath(sim_config["plot_directory"]).as_posix()) + sim_config["auto_farm"] = auto_farm if auto_farm is not None else True farming_ph = decode_puzzle_hash(farming_address) # modify genesis block to give the user the reward simulator_consts = config["network_overrides"]["constants"]["simulator0"] simulator_consts["GENESIS_PRE_FARM_FARMER_PUZZLE_HASH"] = farming_ph.hex() simulator_consts["GENESIS_PRE_FARM_POOL_PUZZLE_HASH"] = farming_ph.hex() + # get fork heights then write back to config + if "HARD_FORK_HEIGHT" not in sim_config: # this meh code is done so that we also write to the config file. + sim_config["HARD_FORK_HEIGHT"] = 0 + if "SOFT_FORK4_HEIGHT" not in sim_config: + sim_config["SOFT_FORK4_HEIGHT"] = 0 + if "SOFT_FORK5_HEIGHT" not in sim_config: + sim_config["SOFT_FORK5_HEIGHT"] = 0 + simulator_consts["HARD_FORK_HEIGHT"] = sim_config["HARD_FORK_HEIGHT"] + simulator_consts["SOFT_FORK4_HEIGHT"] = sim_config["SOFT_FORK4_HEIGHT"] + simulator_consts["SOFT_FORK5_HEIGHT"] = sim_config["SOFT_FORK5_HEIGHT"] + # save config and return the config save_config(chia_root, "config.yaml", config) return config diff --git a/chia/util/initial-config.yaml b/chia/util/initial-config.yaml index 2f108d2f6cd8..9c38d2d1b9c4 100644 --- a/chia/util/initial-config.yaml +++ b/chia/util/initial-config.yaml @@ -654,3 +654,8 @@ simulator: # Should we use real time in the simulated chain? 
# most tests don't need this, however it is pretty important when writing ChiaLisp use_current_time: True + + # Fork Settings + HARD_FORK_HEIGHT: 0 + SOFT_FORK4_HEIGHT: 0 + SOFT_FORK5_HEIGHT: 0 From 6d0335485964774e5334167381c5807969eff2ac Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Tue, 9 Jul 2024 09:53:17 -0700 Subject: [PATCH 47/77] [CHIA-683] Drop unknown tables when resetting wallet sync DB (#18222) Drop unknown tables when resetting wallet sync DB --- chia/_tests/wallet/rpc/test_wallet_rpc.py | 17 ++++++----------- chia/wallet/wallet_node.py | 13 ++++++------- 2 files changed, 12 insertions(+), 18 deletions(-) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index 5974541e6959..112c7b79f4c5 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -2456,6 +2456,7 @@ async def test_set_wallet_resync_on_startup_disable(wallet_rpc_environment: Wall @pytest.mark.anyio +@pytest.mark.limit_consensus_modes(reason="irrelevant") async def test_set_wallet_resync_schema(wallet_rpc_environment: WalletRpcTestEnvironment): env: WalletRpcTestEnvironment = wallet_rpc_environment full_node_api: FullNodeSimulator = env.full_node.api @@ -2470,17 +2471,11 @@ async def test_set_wallet_resync_schema(wallet_rpc_environment: WalletRpcTestEnv dbw: DBWrapper2 = wallet_node.wallet_state_manager.db_wrapper conn: aiosqlite.Connection async with dbw.writer() as conn: - await conn.execute("ALTER TABLE coin_record RENAME TO coin_record_temp") - assert not await wallet_node.reset_sync_db(db_path, fingerprint) - async with dbw.writer() as conn: - await conn.execute("ALTER TABLE coin_record_temp RENAME TO coin_record") - assert await wallet_node.reset_sync_db(db_path, fingerprint) - async with dbw.writer() as conn: - await conn.execute("CREATE TABLE testing_schema (a int, b bool)") - assert not await wallet_node.reset_sync_db(db_path, fingerprint) - async with dbw.writer() as conn: - await conn.execute("DROP TABLE testing_schema") - assert await wallet_node.reset_sync_db(db_path, fingerprint) + await conn.execute("CREATE TABLE blah(temp int)") + await wallet_node.reset_sync_db(db_path, fingerprint) + assert ( + len(list(await conn.execute_fetchall("SELECT name FROM sqlite_master WHERE type='table' AND name='blah'"))) == 0 + ) @pytest.mark.anyio diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py index 3f20ddd7b015..fa909a5d2ce2 100644 --- a/chia/wallet/wallet_node.py +++ b/chia/wallet/wallet_node.py @@ -321,7 +321,7 @@ async def reset_sync_db(self, db_path: Union[Path, str], fingerprint: int) -> bo conn: aiosqlite.Connection # are not part of core wallet tables, but might appear later ignore_tables = {"lineage_proofs_", "sqlite_", "MIGRATED_VALID_TIMES_TXS", "MIGRATED_VALID_TIMES_TRADES"} - required_tables = [ + known_tables = [ "coin_record", "transaction_record", "derivation_paths", @@ -354,22 +354,21 @@ async def reset_sync_db(self, db_path: Union[Path, str], fingerprint: int) -> bo self.log.info("Resetting wallet sync data...") rows = list(await conn.execute_fetchall("SELECT name FROM sqlite_master WHERE type='table'")) names = {x[0] for x in rows} - names = names - set(required_tables) + names = names - set(known_tables) + tables_to_drop = [] for name in names: for ignore_name in ignore_tables: if name.startswith(ignore_name): break else: - self.log.error( - f"Mismatch in expected schema to reset, found unexpected table: {name}. " - "Please check if you've run all migration scripts." 
- ) - return False + tables_to_drop.append(name) await conn.execute("BEGIN") commit = True tables = [row[0] for row in rows] try: + for table in tables_to_drop: + await conn.execute(f"DROP TABLE {table}") if "coin_record" in tables: await conn.execute("DELETE FROM coin_record") if "interested_coins" in tables: From bccda243b77d09c6bdc41597d6a30a30dbb1f2eb Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Tue, 9 Jul 2024 12:53:45 -0400 Subject: [PATCH 48/77] increase linting timeouts to account for slow fmt bandwidth to github (#18277) --- .github/workflows/upload-pypi-source.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 956fe8882679..97375979742c 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -29,7 +29,7 @@ jobs: mypy: name: ${{ matrix.os.emoji }} ${{ matrix.check.name }} - ${{ matrix.os.name }} ${{ matrix.arch.name }} ${{ matrix.python.major_dot_minor }} runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} - timeout-minutes: 10 + timeout-minutes: 20 strategy: fail-fast: false matrix: @@ -109,7 +109,7 @@ jobs: check: name: ${{ matrix.os.emoji }} ${{ matrix.check.name }} - ${{ matrix.os.name }} ${{ matrix.arch.name }} ${{ matrix.python.major_dot_minor }} runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} - timeout-minutes: 10 + timeout-minutes: 20 strategy: fail-fast: false matrix: From b4290a2f2f0c0ab77d3ed22d8ef3a1f9c0bf9e50 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 9 Jul 2024 21:16:51 +0200 Subject: [PATCH 49/77] [CHIA-786] remove the original block compression (#18209) remove the original form block compression, now that the hard fork has activated and we serialize CLVM in a more efficient way --- chia/_tests/blockchain/test_blockchain.py | 10 +- chia/_tests/core/full_node/test_full_node.py | 51 ++------ chia/_tests/core/test_full_node_rpc.py | 7 +- chia/_tests/generator/test_compression.py | 116 +------------------ chia/_tests/generator/test_scan.py | 64 ---------- chia/full_node/bundle_tools.py | 99 +--------------- chia/full_node/full_node.py | 14 --- chia/full_node/full_node_api.py | 17 +-- chia/full_node/full_node_store.py | 3 - chia/full_node/generator.py | 23 +--- chia/simulator/block_tools.py | 50 ++------ chia/simulator/full_node_simulator.py | 1 - chia/types/generator_types.py | 12 +- mypy-exclusions.txt | 1 - 14 files changed, 30 insertions(+), 438 deletions(-) delete mode 100644 chia/_tests/generator/test_scan.py diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 08e68638e4c9..067220c1c98b 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -30,7 +30,6 @@ from chia.consensus.full_block_to_block_record import block_to_block_record from chia.consensus.multiprocess_validation import PreValidationResult from chia.consensus.pot_iterations import is_overflow_block -from chia.full_node.bundle_tools import detect_potential_template_generator from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions from chia.simulator.block_tools import BlockTools, create_block_tools_async from chia.simulator.keyring import TempKeyring @@ -2528,20 +2527,13 @@ async def test_invalid_transactions_ref_list( ) await _validate_and_add_block(b, blocks[-1]) assert blocks[-1].transactions_generator is not None - generator_arg = detect_potential_template_generator(blocks[-1].height, 
blocks[-1].transactions_generator) - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # once the hard for activates, we don't use this form of block - # compression anymore - assert generator_arg is None - else: - assert generator_arg is not None blocks = bt.get_consecutive_blocks( 1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx, - previous_generator=generator_arg, + previous_generator=[blocks[-1].height], ) block = blocks[-1] if consensus_mode >= ConsensusMode.HARD_FORK_2_0: diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 33a814d3c9d8..48dc4e46fb43 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -24,7 +24,6 @@ from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_custom_interval, time_out_messages from chia.consensus.block_body_validation import ForkInfo from chia.consensus.pot_iterations import is_overflow_block -from chia.full_node.bundle_tools import detect_potential_template_generator from chia.full_node.full_node import WalletUpdate from chia.full_node.full_node_api import FullNodeAPI from chia.full_node.signage_point import SignagePoint @@ -160,9 +159,7 @@ def check_nodes_in_sync(): class TestFullNodeBlockCompression: @pytest.mark.anyio @pytest.mark.parametrize("tx_size", [3000000000000]) - async def test_block_compression( - self, setup_two_nodes_and_wallet, empty_blockchain, tx_size, self_hostname, consensus_mode - ): + async def test_block_compression(self, setup_two_nodes_and_wallet, empty_blockchain, tx_size, self_hostname): nodes, wallets, bt = setup_two_nodes_and_wallet server_1 = nodes[0].full_node.server server_2 = nodes[1].full_node.server @@ -217,13 +214,6 @@ async def check_transaction_confirmed(transaction) -> bool: # Confirm generator is not compressed program: Optional[SerializedProgram] = (await full_node_1.get_all_full_blocks())[-1].transactions_generator assert program is not None - template = detect_potential_template_generator(uint32(5), program) - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # after the hard fork we don't use this compression mechanism - # anymore, we use CLVM backrefs in the encoding instead - assert template is None - else: - assert template is not None assert len((await full_node_1.get_all_full_blocks())[-1].transactions_generator_ref_list) == 0 # Send another tx @@ -252,14 +242,10 @@ async def check_transaction_confirmed(transaction) -> bool: # Confirm generator is compressed program: Optional[SerializedProgram] = (await full_node_1.get_all_full_blocks())[-1].transactions_generator assert program is not None - assert detect_potential_template_generator(uint32(6), program) is None num_blocks = len((await full_node_1.get_all_full_blocks())[-1].transactions_generator_ref_list) - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # after the hard fork we don't use this compression mechanism - # anymore, we use CLVM backrefs in the encoding instead - assert num_blocks == 0 - else: - assert num_blocks > 0 + # since the hard fork, we don't use this compression mechanism + # anymore, we use CLVM backrefs in the encoding instead + assert num_blocks == 0 # Farm two empty blocks await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(ph)) @@ -333,14 +319,10 @@ async def check_transaction_confirmed(transaction) -> bool: # Confirm generator is compressed program: Optional[SerializedProgram] = (await 
full_node_1.get_all_full_blocks())[-1].transactions_generator assert program is not None - assert detect_potential_template_generator(uint32(9), program) is None num_blocks = len((await full_node_1.get_all_full_blocks())[-1].transactions_generator_ref_list) - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # after the hard fork we don't use this compression mechanism - # anymore, we use CLVM backrefs in the encoding instead - assert num_blocks == 0 - else: - assert num_blocks > 0 + # since the hard fork, we don't use this compression mechanism + # anymore, we use CLVM backrefs in the encoding instead + assert num_blocks == 0 # Creates a standard_transaction and an anyone-can-spend tx [tr] = await wallet.generate_signed_transaction( @@ -429,13 +411,6 @@ async def check_transaction_confirmed(transaction) -> bool: # Confirm generator is not compressed program: Optional[SerializedProgram] = (await full_node_1.get_all_full_blocks())[-1].transactions_generator assert program is not None - template = detect_potential_template_generator(uint32(11), program) - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # after the hard fork we don't use this compression mechanism - # anymore, we use CLVM backrefs in the encoding instead - assert template is None - else: - assert template is not None assert len((await full_node_1.get_all_full_blocks())[-1].transactions_generator_ref_list) == 0 height = full_node_1.full_node.blockchain.get_peak().height @@ -444,13 +419,6 @@ async def check_transaction_confirmed(transaction) -> bool: all_blocks: List[FullBlock] = await full_node_1.get_all_full_blocks() assert height == len(all_blocks) - 1 - template = full_node_1.full_node.full_node_store.previous_generator - if consensus_mode >= ConsensusMode.HARD_FORK_2_0: - # after the hard fork we don't use this compression mechanism - # anymore, we use CLVM backrefs in the encoding instead - assert template is None - else: - assert template is not None if test_reorgs: reog_blocks = bt.get_consecutive_blocks(14) for r in range(0, len(reog_blocks), 3): @@ -477,11 +445,6 @@ async def check_transaction_confirmed(transaction) -> bool: for result in results: assert result.error is None - # Test revert previous_generator - for block in reog_blocks: - await full_node_1.full_node.add_block(block) - assert full_node_1.full_node.full_node_store.previous_generator is None - class TestFullNodeProtocol: @pytest.mark.anyio diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index 49b5afcbc2d3..de6b265ccc2c 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -221,10 +221,9 @@ async def test1(two_nodes_sim_and_wallets_services, self_hostname, consensus_mod await full_node_api_1.farm_new_transaction_block(FarmNewBlockProtocol(ph_2)) block: FullBlock = (await full_node_api_1.get_all_full_blocks())[-1] - if consensus_mode < ConsensusMode.HARD_FORK_2_0: - # after the hard fork, we don't compress blocks using - # block references anymore - assert len(block.transactions_generator_ref_list) > 0 # compression has occurred + # since the hard fork, we no longer compress blocks using + # block references anymore + assert block.transactions_generator_ref_list == [] block_spends = await client.get_block_spends(block.header_hash) diff --git a/chia/_tests/generator/test_compression.py b/chia/_tests/generator/test_compression.py index 884741a5d87e..216e965ff26e 100644 --- a/chia/_tests/generator/test_compression.py +++ b/chia/_tests/generator/test_compression.py 
@@ -13,19 +13,12 @@ from chia._tests.core.make_block_generator import make_spend_bundle from chia._tests.generator.test_rom import run_generator -from chia.full_node.bundle_tools import ( - bundle_suitable_for_compression, - compressed_coin_spend_entry_list, - compressed_spend_bundle_solution, - match_standard_transaction_at_any_index, - simple_solution_generator, - simple_solution_generator_backrefs, -) +from chia.full_node.bundle_tools import simple_solution_generator, simple_solution_generator_backrefs from chia.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin from chia.simulator.block_tools import test_constants from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.serialized_program import SerializedProgram -from chia.types.generator_types import BlockGenerator, CompressorArg +from chia.types.generator_types import BlockGenerator from chia.types.spend_bundle import SpendBundle from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint32 @@ -65,39 +58,6 @@ assert serialized_length(gen2) == len(gen2) -@dataclass(frozen=True) -class MultipleCompressorArg: - arg: List[CompressorArg] - split_offset: int - - -def create_multiple_ref_generator(args: MultipleCompressorArg, spend_bundle: SpendBundle) -> BlockGenerator: - """ - Decompress a transaction by referencing bytes from multiple input generator references - """ - compressed_cse_list = compressed_coin_spend_entry_list(spend_bundle) - program = TEST_MULTIPLE.curry( - DECOMPRESS_PUZZLE, - DECOMPRESS_CSE_WITH_PREFIX, - args.arg[0].start, - args.arg[0].end - args.split_offset, - args.arg[1].end - args.split_offset, - args.arg[1].end, - compressed_cse_list, - ) - - # TODO aqk: Improve ergonomics of CompressorArg -> GeneratorArg conversion - generator_list = [ - args.arg[0].generator, - args.arg[1].generator, - ] - generator_heights = [ - FAKE_BLOCK_HEIGHT1, - FAKE_BLOCK_HEIGHT2, - ] - return BlockGenerator(SerializedProgram.from_program(program), generator_list, generator_heights) - - def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]: r = [] for coin_spend in bundle.coin_spends: @@ -112,81 +72,9 @@ def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]: class TestCompression: - def test_spend_bundle_suitable(self) -> None: - sb: SpendBundle = make_spend_bundle(1) - assert bundle_suitable_for_compression(sb) - def test_compress_spend_bundle(self) -> None: pass - def test_multiple_input_gen_refs(self) -> None: - match = match_standard_transaction_at_any_index(gen1) - assert match is not None - start1, end1 = match - match = match_standard_transaction_at_any_index(gen2) - assert match is not None - start2, end2 = match - ca1 = CompressorArg(FAKE_BLOCK_HEIGHT1, SerializedProgram.from_bytes(gen1), start1, end1) - ca2 = CompressorArg(FAKE_BLOCK_HEIGHT2, SerializedProgram.from_bytes(gen2), start2, end2) - - prefix_len1 = end1 - start1 - prefix_len2 = end2 - start2 - assert prefix_len1 == prefix_len2 - prefix_len = prefix_len1 - results = [] - for split_offset in range(prefix_len): - gen_args = MultipleCompressorArg([ca1, ca2], split_offset) - spend_bundle: SpendBundle = make_spend_bundle(1) - multi_gen = create_multiple_ref_generator(gen_args, spend_bundle) - cost, result = run_generator(multi_gen) - results.append(result) - assert result is not None - assert cost > 0 - assert all(r == results[0] for r in results) - - def test_compressed_block_results(self) -> None: - sb: SpendBundle = 
make_spend_bundle(1) - match = match_standard_transaction_at_any_index(original_generator) - assert match is not None - start, end = match - ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end) - c = compressed_spend_bundle_solution(ca, sb) - s = simple_solution_generator(sb) - assert c != s - cost_c, result_c = run_generator(c) - cost_s, result_s = run_generator(s) - print() - print(result_c) - assert result_c is not None - assert result_s is not None - print(result_s) - assert result_c == result_s - - def test_get_removals_for_single_coin(self) -> None: - sb: SpendBundle = make_spend_bundle(1) - match = match_standard_transaction_at_any_index(original_generator) - assert match is not None - start, end = match - ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end) - c = compressed_spend_bundle_solution(ca, sb) - removal = sb.coin_spends[0].coin - spend_info = get_puzzle_and_solution_for_coin(c, removal, 0, test_constants) - assert bytes(spend_info.puzzle) == bytes(sb.coin_spends[0].puzzle_reveal) - assert bytes(spend_info.solution) == bytes(sb.coin_spends[0].solution) - # Test non compressed generator as well - s = simple_solution_generator(sb) - spend_info = get_puzzle_and_solution_for_coin(s, removal, 0, test_constants) - assert bytes(spend_info.puzzle) == bytes(sb.coin_spends[0].puzzle_reveal) - assert bytes(spend_info.solution) == bytes(sb.coin_spends[0].solution) - - # test with backrefs (2.0 hard-fork) - s = simple_solution_generator_backrefs(sb) - spend_info = get_puzzle_and_solution_for_coin(s, removal, test_constants.HARD_FORK_HEIGHT + 1, test_constants) - assert Program.from_bytes(bytes(spend_info.puzzle)) == Program.from_bytes( - bytes(sb.coin_spends[0].puzzle_reveal) - ) - assert Program.from_bytes(bytes(spend_info.solution)) == Program.from_bytes(bytes(sb.coin_spends[0].solution)) - class TestDecompression: def test_deserialization(self) -> None: diff --git a/chia/_tests/generator/test_scan.py b/chia/_tests/generator/test_scan.py deleted file mode 100644 index b8df604eeefa..000000000000 --- a/chia/_tests/generator/test_scan.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -from unittest import TestCase - -from chia.full_node.bundle_tools import ( - match_standard_transaction_at_any_index, - match_standard_transaction_exactly_and_return_pubkey, -) -from chia.util.byte_types import hexstr_to_bytes - -gen1 = hexstr_to_bytes( - "ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080" # noqa -) - -EXPECTED_START = 46 -PUBKEY_PLUS_SUFFIX = 48 + 4 + 1 -EXPECTED_END = 337 - PUBKEY_PLUS_SUFFIX - -STANDARD_TRANSACTION_1 = hexstr_to_bytes( - 
"""ff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaff018080""" # noqa -) - -STANDARD_TRANSACTION_2 = hexstr_to_bytes( - """ff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b0bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbff018080""" # noqa -) - - -class TestScan(TestCase): - def test_match_generator(self): - # match_standard_transaction_at_any_index(generator_body: bytes) -> (int,int): - m = match_standard_transaction_at_any_index(gen1) - assert m == (EXPECTED_START, EXPECTED_END) - - m = match_standard_transaction_at_any_index(b"\xff" + gen1 + b"\x80") - assert m == (EXPECTED_START + 1, EXPECTED_END + 1) - - m = match_standard_transaction_at_any_index(gen1[47:]) - assert m is None - - def test_match_transaction(self): - # match_standard_transaction_exactly_and_return_pubkey(transaction: bytes) -> Optional[bytes]: - m = match_standard_transaction_exactly_and_return_pubkey(STANDARD_TRANSACTION_1) - assert m == hexstr_to_bytes( - "b0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" - ) - - m = match_standard_transaction_exactly_and_return_pubkey(STANDARD_TRANSACTION_1 + b"\xfa") - assert m is None - - m = match_standard_transaction_exactly_and_return_pubkey(b"\xba" + STANDARD_TRANSACTION_1 + b"\xfa") - assert m is None - - m = match_standard_transaction_exactly_and_return_pubkey(b"\xba" + STANDARD_TRANSACTION_1) - assert m is None - - m = match_standard_transaction_exactly_and_return_pubkey( - gen1[EXPECTED_START : EXPECTED_END + PUBKEY_PLUS_SUFFIX] - ) - assert m == hexstr_to_bytes( - "b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3" - ) - - m = match_standard_transaction_exactly_and_return_pubkey(gen1) - assert m is None diff --git a/chia/full_node/bundle_tools.py b/chia/full_node/bundle_tools.py index 23a7ddcc927d..34be26342002 100644 --- a/chia/full_node/bundle_tools.py +++ b/chia/full_node/bundle_tools.py @@ -1,18 +1,10 @@ from __future__ import annotations -import re -from typing import List, Optional, Tuple, Union - from chia_rs import solution_generator, solution_generator_backrefs -from chia.full_node.generator import create_compressed_generator -from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.serialized_program import SerializedProgram -from chia.types.coin_spend import CoinSpend -from chia.types.generator_types import BlockGenerator, CompressorArg +from chia.types.generator_types import BlockGenerator from chia.types.spend_bundle import SpendBundle -from 
chia.util.byte_types import hexstr_to_bytes -from chia.util.ints import uint32 def simple_solution_generator(bundle: SpendBundle) -> BlockGenerator: @@ -25,92 +17,3 @@ def simple_solution_generator_backrefs(bundle: SpendBundle) -> BlockGenerator: spends = [(cs.coin, bytes(cs.puzzle_reveal), bytes(cs.solution)) for cs in bundle.coin_spends] block_program = solution_generator_backrefs(spends) return BlockGenerator(SerializedProgram.from_bytes(block_program), [], []) - - -STANDARD_TRANSACTION_PUZZLE_PREFIX = r"""ff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01""" # noqa - -STANDARD_TRANSACTION_PUZZLE_PATTERN = re.compile(STANDARD_TRANSACTION_PUZZLE_PREFIX + r"(b0[a-f0-9]{96})ff018080") - - -# match_standard_transaction_anywhere -def match_standard_transaction_at_any_index(generator_body: bytes) -> Optional[Tuple[int, int]]: - """Return (start, end) of match, or None if pattern could not be found""" - - # We intentionally match the entire puzzle, not just the prefix that we will use, - # in case we later want to convert the template generator into a tree of CLVM - # Objects before operating on it - m = STANDARD_TRANSACTION_PUZZLE_PATTERN.search(generator_body.hex()) - if m: - assert m.start() % 2 == 0 and m.end() % 2 == 0 - start = m.start() // 2 - end = (m.end() - 98 - len("ff018080")) // 2 - assert generator_body[start:end] == bytes.fromhex(STANDARD_TRANSACTION_PUZZLE_PREFIX) - return start, end - else: - return None - - -def match_standard_transaction_exactly_and_return_pubkey(puzzle: SerializedProgram) -> Optional[bytes]: - m = STANDARD_TRANSACTION_PUZZLE_PATTERN.fullmatch(bytes(puzzle).hex()) - return None if m is None else hexstr_to_bytes(m.group(1)) - - -def compress_cse_puzzle(puzzle: SerializedProgram) -> Optional[bytes]: - return match_standard_transaction_exactly_and_return_pubkey(puzzle) - - -def compress_coin_spend(coin_spend: CoinSpend) -> List[List[Union[bytes, None, int, Program]]]: - compressed_puzzle = compress_cse_puzzle(coin_spend.puzzle_reveal) - return [ - [coin_spend.coin.parent_coin_info, coin_spend.coin.amount], - [compressed_puzzle, Program.from_bytes(bytes(coin_spend.solution))], - ] - - -def puzzle_suitable_for_compression(puzzle: SerializedProgram) -> bool: - return True if match_standard_transaction_exactly_and_return_pubkey(puzzle) else False - - -def bundle_suitable_for_compression(bundle: SpendBundle) -> bool: - return all(puzzle_suitable_for_compression(coin_spend.puzzle_reveal) for coin_spend in bundle.coin_spends) - - -def compressed_coin_spend_entry_list(bundle: SpendBundle) -> List[List[List[Union[bytes, None, int, Program]]]]: - compressed_cse_list: List[List[List[Union[bytes, None, int, Program]]]] = [] - for coin_spend in bundle.coin_spends: - compressed_cse_list.append(compress_coin_spend(coin_spend)) - return compressed_cse_list - - -def compressed_spend_bundle_solution(original_generator_params: CompressorArg, bundle: SpendBundle) -> BlockGenerator: - compressed_cse_list = compressed_coin_spend_entry_list(bundle) - return create_compressed_generator(original_generator_params, compressed_cse_list) - - -def 
best_solution_generator_from_template(previous_generator: CompressorArg, bundle: SpendBundle) -> BlockGenerator: - """ - Creates a compressed block generator, taking in a block that passes the checks below - """ - if bundle_suitable_for_compression(bundle): - return compressed_spend_bundle_solution(previous_generator, bundle) - else: - return simple_solution_generator(bundle) - - -def detect_potential_template_generator(block_height: uint32, program: SerializedProgram) -> Optional[CompressorArg]: - """ - If this returns a GeneratorArg, that means that the input, `program`, has a standard transaction - that is not compressed that we can use as a template for future blocks. - If it returns None, this block cannot be used. - In this implementation, we store the offsets needed by the compressor in the GeneratorArg - This block will serve as a template for the compression of other newly farmed blocks. - """ - - m = match_standard_transaction_at_any_index(bytes(program)) - if m is None: - return None - start, end = m - if start and end and end > start >= 0: - return CompressorArg(block_height, program, start, end) - else: - return None diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 33028a86c65d..5c4238deed9c 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -44,7 +44,6 @@ from chia.consensus.multiprocess_validation import PreValidationResult from chia.consensus.pot_iterations import calculate_sp_iters from chia.full_node.block_store import BlockStore -from chia.full_node.bundle_tools import detect_potential_template_generator from chia.full_node.coin_store import CoinStore from chia.full_node.full_node_api import FullNodeAPI from chia.full_node.full_node_store import FullNodeStore, FullNodeStorePeakResult, UnfinishedBlockEntry @@ -1479,12 +1478,6 @@ async def peak_post_processing( f"{len(block.transactions_generator_ref_list) if block.transactions_generator else 'No tx'}" ) - if ( - self.full_node_store.previous_generator is not None - and state_change_summary.fork_height < self.full_node_store.previous_generator.block_height - ): - self.full_node_store.previous_generator = None - hints_to_add, lookup_coin_ids = get_hints_and_subscription_coin_ids( state_change_summary, self.subscriptions.has_coin_subscription, @@ -1549,13 +1542,6 @@ async def peak_post_processing( spent_coins: List[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals] mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins) - # Check if we detected a spent transaction, to load up our generator cache - if block.transactions_generator is not None and self.full_node_store.previous_generator is None: - generator_arg = detect_potential_template_generator(block.height, block.transactions_generator) - if generator_arg: - self.log.info(f"Saving previous generator for height {block.height}") - self.full_node_store.previous_generator = generator_arg - return PeakPostProcessingResult( mempool_new_peak_result.items, mempool_new_peak_result.removals, diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index 723b7bf8a488..161b22e0c16f 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -17,11 +17,7 @@ from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain import BlockchainMutexPriority from chia.consensus.pot_iterations import calculate_ip_iters, calculate_iterations_quality, calculate_sp_iters -from chia.full_node.bundle_tools import 
( - best_solution_generator_from_template, - simple_solution_generator, - simple_solution_generator_backrefs, -) +from chia.full_node.bundle_tools import simple_solution_generator, simple_solution_generator_backrefs from chia.full_node.coin_store import CoinStore from chia.full_node.fee_estimate import FeeEstimate, FeeEstimateGroup, fee_rate_v2_to_v1 from chia.full_node.fee_estimator_interface import FeeEstimatorInterface @@ -859,16 +855,7 @@ async def declare_proof_of_space( if peak.height >= self.full_node.constants.HARD_FORK_HEIGHT: block_generator = simple_solution_generator_backrefs(spend_bundle) else: - if self.full_node.full_node_store.previous_generator is not None: - self.log.info( - f"Using previous generator for height " - f"{self.full_node.full_node_store.previous_generator}" - ) - block_generator = best_solution_generator_from_template( - self.full_node.full_node_store.previous_generator, spend_bundle - ) - else: - block_generator = simple_solution_generator(spend_bundle) + block_generator = simple_solution_generator(spend_bundle) def get_plot_sig(to_sign: bytes32, _extra: G1Element) -> G2Element: if to_sign == request.challenge_chain_sp: diff --git a/chia/full_node/full_node_store.py b/chia/full_node/full_node_store.py index e36dcd637950..48a1395432ed 100644 --- a/chia/full_node/full_node_store.py +++ b/chia/full_node/full_node_store.py @@ -21,7 +21,6 @@ from chia.types.blockchain_format.vdf import VDFInfo, validate_vdf from chia.types.end_of_slot_bundle import EndOfSubSlotBundle from chia.types.full_block import FullBlock -from chia.types.generator_types import CompressorArg from chia.types.unfinished_block import UnfinishedBlock from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.lru_cache import LRUCache @@ -135,7 +134,6 @@ class FullNodeStore: recent_signage_points: LRUCache[bytes32, Tuple[SignagePoint, float]] recent_eos: LRUCache[bytes32, Tuple[EndOfSubSlotBundle, float]] - previous_generator: Optional[CompressorArg] pending_tx_request: Dict[bytes32, bytes32] # tx_id: peer_id peers_with_tx: Dict[bytes32, Set[bytes32]] # tx_id: Set[peer_ids} tx_fetch_tasks: Dict[bytes32, asyncio.Task[None]] # Task id: task @@ -155,7 +153,6 @@ def __init__(self, constants: ConsensusConstants): self.future_ip_cache = {} self.recent_signage_points = LRUCache(500) self.recent_eos = LRUCache(50) - self.previous_generator = None self.future_cache_key_times = {} self.constants = constants self.clear_slots() diff --git a/chia/full_node/generator.py b/chia/full_node/generator.py index 218424dd5c4b..73bc0db1545c 100644 --- a/chia/full_node/generator.py +++ b/chia/full_node/generator.py @@ -1,11 +1,10 @@ from __future__ import annotations import logging -from typing import List, Optional, Union +from typing import List, Optional -from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.serialized_program import SerializedProgram -from chia.types.generator_types import BlockGenerator, CompressorArg, GeneratorBlockCacheInterface +from chia.types.generator_types import BlockGenerator, GeneratorBlockCacheInterface from chia.util.ints import uint32 from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile @@ -36,21 +35,3 @@ def create_block_generator( generator_list.append(previous_generator) generator_heights.append(i) return BlockGenerator(generator, generator_list, generator_heights) - - -def create_compressed_generator( - original_generator: CompressorArg, - compressed_cse_list: List[List[List[Union[bytes, None, int, Program]]]], -) -> 
BlockGenerator: - """ - Bind the generator block program template to a particular reference block, - template bytes offsets, and SpendBundle. - """ - start = original_generator.start - end = original_generator.end - program = DECOMPRESS_BLOCK.curry( - DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, Program.to(start), Program.to(end), compressed_cse_list - ) - return BlockGenerator( - SerializedProgram.from_program(program), [original_generator.generator], [original_generator.block_height] - ) diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py index 4c2201c78a01..5b3e99057aef 100644 --- a/chia/simulator/block_tools.py +++ b/chia/simulator/block_tools.py @@ -14,7 +14,7 @@ from dataclasses import dataclass, replace from pathlib import Path from random import Random -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple import anyio from chia_rs import ALLOW_BACKREFS, MEMPOOL_MODE, AugSchemeMPL, G1Element, G2Element, PrivateKey, solution_generator @@ -38,12 +38,7 @@ ) from chia.consensus.vdf_info_computation import get_signage_point_vdf_info from chia.daemon.keychain_proxy import KeychainProxy, connect_to_keychain_and_validate, wrap_local_keychain -from chia.full_node.bundle_tools import ( - best_solution_generator_from_template, - detect_potential_template_generator, - simple_solution_generator, - simple_solution_generator_backrefs, -) +from chia.full_node.bundle_tools import simple_solution_generator, simple_solution_generator_backrefs from chia.full_node.signage_point import SignagePoint from chia.plotting.create_plots import PlotKeys, create_plots from chia.plotting.manager import PlotManager @@ -92,7 +87,7 @@ from chia.types.condition_opcodes import ConditionOpcode from chia.types.end_of_slot_bundle import EndOfSubSlotBundle from chia.types.full_block import FullBlock -from chia.types.generator_types import BlockGenerator, CompressorArg +from chia.types.generator_types import BlockGenerator from chia.types.spend_bundle import SpendBundle from chia.types.unfinished_block import UnfinishedBlock from chia.util.bech32m import encode_puzzle_hash @@ -589,7 +584,8 @@ def get_consecutive_blocks( normalized_to_identity_cc_sp: bool = False, normalized_to_identity_cc_ip: bool = False, current_time: bool = False, - previous_generator: Optional[Union[CompressorArg, List[uint32]]] = None, + # TODO: rename this to block_refs + previous_generator: Optional[List[uint32]] = None, genesis_timestamp: Optional[uint64] = None, force_plot_id: Optional[bytes32] = None, dummy_block_references: bool = False, @@ -795,16 +791,9 @@ def get_consecutive_blocks( block_generator = simple_solution_generator_backrefs(transaction_data) previous_generator = None else: - if type(previous_generator) is CompressorArg: - block_generator = best_solution_generator_from_template( - previous_generator, transaction_data - ) - else: - block_generator = simple_solution_generator(transaction_data) - if type(previous_generator) is list: - block_generator = BlockGenerator( - block_generator.program, [], previous_generator - ) + block_generator = simple_solution_generator(transaction_data) + if previous_generator is not None: + block_generator = BlockGenerator(block_generator.program, [], previous_generator) aggregate_signature = transaction_data.aggregated_signature else: @@ -888,11 +877,6 @@ def get_consecutive_blocks( if full_block.transactions_generator is not None: tx_block_heights.append(full_block.height) - 
compressor_arg = detect_potential_template_generator( - full_block.height, full_block.transactions_generator - ) - if compressor_arg is not None: - previous_generator = compressor_arg blocks_added_this_sub_slot += 1 @@ -1120,16 +1104,9 @@ def get_consecutive_blocks( block_generator = simple_solution_generator_backrefs(transaction_data) previous_generator = None else: - if previous_generator is not None and type(previous_generator) is CompressorArg: - block_generator = best_solution_generator_from_template( - previous_generator, transaction_data - ) - else: - block_generator = simple_solution_generator(transaction_data) - if type(previous_generator) is list: - block_generator = BlockGenerator( - block_generator.program, [], previous_generator - ) + block_generator = simple_solution_generator(transaction_data) + if previous_generator is not None: + block_generator = BlockGenerator(block_generator.program, [], previous_generator) aggregate_signature = transaction_data.aggregated_signature else: block_generator = None @@ -1216,11 +1193,6 @@ def get_consecutive_blocks( if full_block.transactions_generator is not None: tx_block_heights.append(full_block.height) - compressor_arg = detect_potential_template_generator( - full_block.height, full_block.transactions_generator - ) - if compressor_arg is not None: - previous_generator = compressor_arg blocks_added_this_sub_slot += 1 self.log.info(f"Created block {block_record.height} ov=True, iters {block_record.total_iters}") diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index 207ed7af3c07..df31f404f180 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -208,7 +208,6 @@ async def farm_new_transaction_block( block_list_input=current_blocks, guarantee_transaction_block=True, current_time=current_time, - previous_generator=self.full_node.full_node_store.previous_generator, ) await self.full_node.add_block(more[-1]) return more[-1] diff --git a/chia/types/generator_types.py b/chia/types/generator_types.py index 9cfb5076169a..465c11a3d835 100644 --- a/chia/types/generator_types.py +++ b/chia/types/generator_types.py @@ -1,6 +1,6 @@ from __future__ import annotations -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import List from chia.types.blockchain_format.serialized_program import SerializedProgram @@ -15,16 +15,6 @@ def get_generator_for_block_height(self, height: uint32) -> SerializedProgram: return # type: ignore[return-value] -@dataclass(frozen=True) -class CompressorArg: - """`CompressorArg` is used as input to the Block Compressor""" - - block_height: uint32 - generator: SerializedProgram = field(repr=False) - start: int - end: int - - @streamable @dataclass(frozen=True) class BlockGenerator(Streamable): diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt index a2d7a2c07e49..690e4306e9f7 100644 --- a/mypy-exclusions.txt +++ b/mypy-exclusions.txt @@ -78,7 +78,6 @@ chia._tests.core.util.test_keychain chia._tests.core.util.test_keyring_wrapper chia._tests.core.util.test_lru_cache chia._tests.core.util.test_significant_bits -chia._tests.generator.test_scan chia._tests.plotting.test_plot_manager chia._tests.pools.test_pool_cmdline chia._tests.pools.test_pool_config From ebf747b3ae194604c0aae34751fcb71053e39c34 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Tue, 9 Jul 2024 12:17:11 -0700 Subject: [PATCH 50/77] [CHIA-839] Refactor KeywringWrapper/FileKeyring for better ergonomics (#18275) * `.get_passphrase` -> 
`.keyring.get_key()` * `.set_password` -> `.keyring.set_key` * `.delete_passphrase` -> `.keyring.delete_key` * `.*_label` -> `.keyring.*_label` * Introduce DecryptedKeyringData * Fix bad assumption * Fix tests * Pre-commit * Pivot key storage to external metadata for forwards compatibility * typo & passphrase -> key --- .../util/test_file_keyring_synchronization.py | 9 +- chia/_tests/core/util/test_keychain.py | 6 +- chia/_tests/core/util/test_keyring_wrapper.py | 164 +++++++++--------- chia/util/file_keyring.py | 84 +++++++-- chia/util/keychain.py | 27 +-- chia/util/keyring_wrapper.py | 20 --- 6 files changed, 175 insertions(+), 135 deletions(-) diff --git a/chia/_tests/core/util/test_file_keyring_synchronization.py b/chia/_tests/core/util/test_file_keyring_synchronization.py index 610c41c14c8b..3bbd15ec4b0e 100644 --- a/chia/_tests/core/util/test_file_keyring_synchronization.py +++ b/chia/_tests/core/util/test_file_keyring_synchronization.py @@ -10,6 +10,7 @@ from chia._tests.core.util.test_lockfile import wait_for_enough_files_in_directory from chia.simulator.keyring import TempKeyring +from chia.util.file_keyring import Key from chia.util.keyring_wrapper import KeyringWrapper from chia.util.timing import adjusted_timeout @@ -40,9 +41,9 @@ def dummy_set_passphrase(service, user, passphrase, keyring_path, index): sleep(0.1) assert started - KeyringWrapper.get_shared_instance().set_passphrase(service=service, user=user, passphrase=passphrase) + KeyringWrapper.get_shared_instance().keyring.set_key(service=service, user=user, key=passphrase) - found_passphrase = KeyringWrapper.get_shared_instance().get_passphrase(service, user) + found_passphrase = KeyringWrapper.get_shared_instance().keyring.get_key(service, user) if found_passphrase != passphrase: log.error( f"[pid:{os.getpid()}] error: didn't get expected passphrase: " @@ -64,7 +65,7 @@ def test_multiple_writers(self, empty_temp_file_keyring: TempKeyring): num_workers = 10 keyring_path = str(KeyringWrapper.get_shared_instance().keyring.keyring_path) passphrase_list = [ - ("test-service", f"test-user-{index}", f"passphrase {index}", keyring_path, index) + ("test-service", f"test-user-{index}", Key(f"passphrase {index}".encode()), keyring_path, index) for index in range(num_workers) ] @@ -100,5 +101,5 @@ def test_multiple_writers(self, empty_temp_file_keyring: TempKeyring): # Expect: parent process should be able to find all passphrases that were set by the child processes for item in passphrase_list: expected_passphrase = item[2] - actual_passphrase = KeyringWrapper.get_shared_instance().get_passphrase(service=item[0], user=item[1]) + actual_passphrase = KeyringWrapper.get_shared_instance().keyring.get_key(service=item[0], user=item[1]) assert expected_passphrase == actual_passphrase diff --git a/chia/_tests/core/util/test_keychain.py b/chia/_tests/core/util/test_keychain.py index 87c26dc0b80d..15578cc40e9d 100644 --- a/chia/_tests/core/util/test_keychain.py +++ b/chia/_tests/core/util/test_keychain.py @@ -454,14 +454,14 @@ async def test_delete_drops_labels(get_temp_keyring: Keychain, delete_all: bool) keychain.add_key(mnemonic_or_pk=key_data.mnemonic_str(), label=key_data.label) assert key_data == keychain.get_key(key_data.fingerprint, include_secrets=True) assert key_data.label is not None - assert keychain.keyring_wrapper.get_label(key_data.fingerprint) == key_data.label + assert keychain.keyring_wrapper.keyring.get_label(key_data.fingerprint) == key_data.label if delete_all: # Delete the keys via `delete_all` and make sure no labels 
are left keychain.delete_all_keys() for key_data in keys: - assert keychain.keyring_wrapper.get_label(key_data.fingerprint) is None + assert keychain.keyring_wrapper.keyring.get_label(key_data.fingerprint) is None else: # Delete the keys via fingerprint and make sure the label gets dropped for key_data in keys: keychain.delete_key_by_fingerprint(key_data.fingerprint) - assert keychain.keyring_wrapper.get_label(key_data.fingerprint) is None + assert keychain.keyring_wrapper.keyring.get_label(key_data.fingerprint) is None diff --git a/chia/_tests/core/util/test_keyring_wrapper.py b/chia/_tests/core/util/test_keyring_wrapper.py index 3a8a4d007bf7..3c4fa3cb79b8 100644 --- a/chia/_tests/core/util/test_keyring_wrapper.py +++ b/chia/_tests/core/util/test_keyring_wrapper.py @@ -7,6 +7,7 @@ from chia.simulator.keyring import TempKeyring from chia.util.errors import KeychainFingerprintNotFound, KeychainLabelError, KeychainLabelExists, KeychainLabelInvalid +from chia.util.file_keyring import Key from chia.util.keyring_wrapper import DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE, KeyringWrapper log = logging.getLogger(__name__) @@ -199,71 +200,74 @@ def test_remove_master_passphrase_from_populated_keyring(self, populated_temp_fi ) # When: using a new empty keyring - def test_get_passphrase(self, empty_temp_file_keyring: TempKeyring): + def test_get_key(self, empty_temp_file_keyring: TempKeyring): """ - Simple passphrase setting and retrieval + Simple key setting and retrieval """ - # Expect: passphrase lookup should return None - assert KeyringWrapper.get_shared_instance().get_passphrase("service-abc", "user-xyz") is None + # Expect: key lookup should return None + assert KeyringWrapper.get_shared_instance().keyring.get_key("service-abc", "user-xyz") is None - # When: setting a passphrase - KeyringWrapper.get_shared_instance().set_passphrase("service-abc", "user-xyz", b"super secret passphrase".hex()) - - # Expect: passphrase lookup should succeed - assert ( - KeyringWrapper.get_shared_instance().get_passphrase("service-abc", "user-xyz") - == b"super secret passphrase".hex() + # When: setting a key + KeyringWrapper.get_shared_instance().keyring.set_key( + "service-abc", "user-xyz", Key(b"super secret key", {"foo": "bar"}) ) - # Expect: non-existent passphrase lookup should fail - assert ( - KeyringWrapper.get_shared_instance().get_passphrase("service-123", "some non-existent passphrase") is None + # Expect: key lookup should succeed + assert KeyringWrapper.get_shared_instance().keyring.get_key("service-abc", "user-xyz") == Key( + b"super secret key", {"foo": "bar"} ) + # Expect: non-existent key lookup should fail + assert KeyringWrapper.get_shared_instance().keyring.get_key("service-123", "some non-existent key") is None + # When: using a new empty keyring - def test_set_passphrase_overwrite(self, empty_temp_file_keyring: TempKeyring): + def test_set_key_overwrite(self, empty_temp_file_keyring: TempKeyring): """ - Overwriting a previously-set passphrase should work + Overwriting a previously-set key should work """ - # When: initially setting the passphrase - KeyringWrapper.get_shared_instance().set_passphrase("service-xyz", "user-123", b"initial passphrase".hex()) + # When: initially setting the key + KeyringWrapper.get_shared_instance().keyring.set_key("service-xyz", "user-123", Key(b"initial key")) - # Expect: passphrase lookup should succeed - assert ( - KeyringWrapper.get_shared_instance().get_passphrase("service-xyz", "user-123") - == b"initial passphrase".hex() - ) + # Expect: key lookup should 
succeed + assert KeyringWrapper.get_shared_instance().keyring.get_key("service-xyz", "user-123") == Key(b"initial key") - # When: updating the same passphrase - KeyringWrapper.get_shared_instance().set_passphrase("service-xyz", "user-123", b"updated passphrase".hex()) + # When: updating the same key + KeyringWrapper.get_shared_instance().keyring.set_key("service-xyz", "user-123", Key(b"updated key")) - # Expect: the updated passphrase should be retrieved - assert ( - KeyringWrapper.get_shared_instance().get_passphrase("service-xyz", "user-123") - == b"updated passphrase".hex() - ) + # Expect: the updated key should be retrieved + assert KeyringWrapper.get_shared_instance().keyring.get_key("service-xyz", "user-123") == Key(b"updated key") # When: using a new empty keyring - def test_delete_passphrase(self, empty_temp_file_keyring: TempKeyring): + def test_delete_key(self, empty_temp_file_keyring: TempKeyring): """ - Deleting a non-existent passphrase should fail gracefully (no exceptions) + Deleting a non-existent key should fail gracefully (no exceptions) """ - # Expect: deleting a non-existent passphrase should fail gracefully - KeyringWrapper.get_shared_instance().delete_passphrase("some service", "some user") + # Expect: deleting a non-existent key should fail gracefully + KeyringWrapper.get_shared_instance().keyring.delete_key("some service", "some user") - # When: setting a passphrase - KeyringWrapper.get_shared_instance().set_passphrase("some service", "some user", b"500p3r 53cr37".hex()) + # When: setting a key + KeyringWrapper.get_shared_instance().keyring.set_key("some service", "some user", Key(b"500p3r 53cr37")) - # Expect: passphrase retrieval should succeed - assert ( - KeyringWrapper.get_shared_instance().get_passphrase("some service", "some user") == b"500p3r 53cr37".hex() + # Expect: key retrieval should succeed + assert KeyringWrapper.get_shared_instance().keyring.get_key("some service", "some user") == Key( + b"500p3r 53cr37" ) - # When: deleting the passphrase - KeyringWrapper.get_shared_instance().delete_passphrase("some service", "some user") + # When: deleting the key + KeyringWrapper.get_shared_instance().keyring.delete_key("some service", "some user") + + # Expect: key retrieval should fail gracefully + assert KeyringWrapper.get_shared_instance().keyring.get_key("some service", "some user") is None + + # Check that metadata is properly deleted + from chia.cmds.passphrase_funcs import obtain_current_passphrase - # Expect: passphrase retrieval should fail gracefully - assert KeyringWrapper.get_shared_instance().get_passphrase("some service", "some user") is None + passphrase = obtain_current_passphrase(use_passphrase_cache=True) + assert KeyringWrapper.get_shared_instance().keyring.cached_file_content.get_decrypted_data_dict(passphrase) == { + "keys": {}, + "labels": {}, + "metadata": {}, + } def test_emoji_master_passphrase(self, empty_temp_file_keyring: TempKeyring): """ @@ -379,49 +383,49 @@ def test_passphrase_hint_update(self, empty_temp_file_keyring: TempKeyring): def test_get_label(self, empty_temp_file_keyring: TempKeyring): keyring_wrapper = KeyringWrapper.get_shared_instance() # label lookup for 1, 2, 3 should return None - assert keyring_wrapper.get_label(1) is None - assert keyring_wrapper.get_label(2) is None - assert keyring_wrapper.get_label(3) is None + assert keyring_wrapper.keyring.get_label(1) is None + assert keyring_wrapper.keyring.get_label(2) is None + assert keyring_wrapper.keyring.get_label(3) is None # Set and validate a label for 1 - 
keyring_wrapper.set_label(1, "one") - assert keyring_wrapper.get_label(1) == "one" + keyring_wrapper.keyring.set_label(1, "one") + assert keyring_wrapper.keyring.get_label(1) == "one" # Set and validate a label for 3 - keyring_wrapper.set_label(3, "three") + keyring_wrapper.keyring.set_label(3, "three") # And validate all match the expected values - assert keyring_wrapper.get_label(1) == "one" - assert keyring_wrapper.get_label(2) is None - assert keyring_wrapper.get_label(3) == "three" + assert keyring_wrapper.keyring.get_label(1) == "one" + assert keyring_wrapper.keyring.get_label(2) is None + assert keyring_wrapper.keyring.get_label(3) == "three" def test_set_label(self, empty_temp_file_keyring: TempKeyring): keyring_wrapper = KeyringWrapper.get_shared_instance() # Set and validate a label for 1 - keyring_wrapper.set_label(1, "one") - assert keyring_wrapper.get_label(1) == "one" + keyring_wrapper.keyring.set_label(1, "one") + assert keyring_wrapper.keyring.get_label(1) == "one" # Set and validate a label for 2 - keyring_wrapper.set_label(2, "two") - assert keyring_wrapper.get_label(2) == "two" + keyring_wrapper.keyring.set_label(2, "two") + assert keyring_wrapper.keyring.get_label(2) == "two" # Change the label of 2 - keyring_wrapper.set_label(2, "two!") - assert keyring_wrapper.get_label(2) == "two!" + keyring_wrapper.keyring.set_label(2, "two!") + assert keyring_wrapper.keyring.get_label(2) == "two!" # 1 should still have the same label - assert keyring_wrapper.get_label(1) == "one" + assert keyring_wrapper.keyring.get_label(1) == "one" # Change the label of 2 again - keyring_wrapper.set_label(2, "two!!") - assert keyring_wrapper.get_label(2) == "two!!" + keyring_wrapper.keyring.set_label(2, "two!!") + assert keyring_wrapper.keyring.get_label(2) == "two!!" # 1 should still have the same label - assert keyring_wrapper.get_label(1) == "one" + assert keyring_wrapper.keyring.get_label(1) == "one" # Change the label of 1 - keyring_wrapper.set_label(1, "one!") - assert keyring_wrapper.get_label(1) == "one!" + keyring_wrapper.keyring.set_label(1, "one!") + assert keyring_wrapper.keyring.get_label(1) == "one!" # 2 should still have the same label - assert keyring_wrapper.get_label(2) == "two!!" + assert keyring_wrapper.keyring.get_label(2) == "two!!" 
@pytest.mark.parametrize( "label", @@ -433,8 +437,8 @@ def test_set_label(self, empty_temp_file_keyring: TempKeyring): ) def test_set_special_labels(self, label: str, empty_temp_file_keyring: TempKeyring): keyring_wrapper = KeyringWrapper.get_shared_instance() - keyring_wrapper.set_label(1, label) - assert keyring_wrapper.get_label(1) == label + keyring_wrapper.keyring.set_label(1, label) + assert keyring_wrapper.keyring.get_label(1) == label @pytest.mark.parametrize( "label, exception, message", @@ -456,9 +460,9 @@ def test_set_label_failures( self, label: str, exception: Type[KeychainLabelError], message: str, empty_temp_file_keyring: TempKeyring ) -> None: keyring_wrapper = KeyringWrapper.get_shared_instance() - keyring_wrapper.set_label(1, "one") + keyring_wrapper.keyring.set_label(1, "one") with pytest.raises(exception, match=message) as e: - keyring_wrapper.set_label(1, label) + keyring_wrapper.keyring.set_label(1, label) assert e.value.label == label if isinstance(e.value, KeychainLabelExists): assert e.value.label == "one" @@ -467,20 +471,20 @@ def test_set_label_failures( def test_delete_label(self, empty_temp_file_keyring: TempKeyring) -> None: keyring_wrapper = KeyringWrapper.get_shared_instance() # Set labels for 1,2 and validate them - keyring_wrapper.set_label(1, "one") - keyring_wrapper.set_label(2, "two") - assert keyring_wrapper.get_label(1) == "one" - assert keyring_wrapper.get_label(2) == "two" + keyring_wrapper.keyring.set_label(1, "one") + keyring_wrapper.keyring.set_label(2, "two") + assert keyring_wrapper.keyring.get_label(1) == "one" + assert keyring_wrapper.keyring.get_label(2) == "two" # Remove the label of 1 - keyring_wrapper.delete_label(1) - assert keyring_wrapper.get_label(1) is None - assert keyring_wrapper.get_label(2) == "two" + keyring_wrapper.keyring.delete_label(1) + assert keyring_wrapper.keyring.get_label(1) is None + assert keyring_wrapper.keyring.get_label(2) == "two" # Remove the label of 2 - keyring_wrapper.delete_label(2) - assert keyring_wrapper.get_label(1) is None - assert keyring_wrapper.get_label(2) is None + keyring_wrapper.keyring.delete_label(2) + assert keyring_wrapper.keyring.get_label(1) is None + assert keyring_wrapper.keyring.get_label(2) is None # Make sure the deletion fails for 0-2 for i in range(3): with pytest.raises(KeychainFingerprintNotFound) as e: - keyring_wrapper.delete_label(i) + keyring_wrapper.keyring.delete_label(i) assert e.value.fingerprint == i diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py index 0e4b6b12b784..5a7d8c2f98f8 100644 --- a/chia/util/file_keyring.py +++ b/chia/util/file_keyring.py @@ -78,8 +78,8 @@ def decrypt_data(input_data: bytes, key: bytes, nonce: bytes) -> bytes: return output[len(CHECKBYTES_VALUE) :] -def default_file_keyring_data() -> Dict[str, Any]: - return {"keys": {}, "labels": {}} +def default_file_keyring_data() -> DecryptedKeyringData: + return DecryptedKeyringData({}, {}) def keyring_path_from_root(keys_root_path: Path) -> Path: @@ -152,11 +152,13 @@ def get_decrypted_data_dict(self, passphrase: str) -> Dict[str, Any]: data_yml = decrypt_data(encrypted_data_yml, key, self.nonce) return dict(yaml.safe_load(data_yml)) - def update_encrypted_data_dict(self, passphrase: str, decrypted_dict: Dict[str, Any], update_salt: bool) -> None: + def update_encrypted_data_dict( + self, passphrase: str, decrypted_dict: DecryptedKeyringData, update_salt: bool + ) -> None: self.nonce = generate_nonce() if update_salt: self.salt = generate_salt() - data_yaml = 
yaml.safe_dump(decrypted_dict) + data_yaml = yaml.safe_dump(decrypted_dict.to_dict()) key = symmetric_key_from_passphrase(passphrase, self.salt) self.data = base64.b64encode(encrypt_data(data_yaml.encode(), key, self.nonce)).decode("utf-8") @@ -170,6 +172,58 @@ def to_dict(self) -> Dict[str, Any]: return result +@dataclass(frozen=True) +class Key: + secret: bytes + metadata: Optional[Dict[str, Any]] = None + + @classmethod + def parse(cls, data: str, metadata: Optional[Dict[str, Any]]) -> Key: + return cls( + bytes.fromhex(data), + metadata, + ) + + def to_data(self) -> Union[str, Dict[str, Any]]: + return self.secret.hex() + + +Users = Dict[str, Key] +Services = Dict[str, Users] + + +@dataclass +class DecryptedKeyringData: + services: Services + labels: Dict[int, str] # {fingerprint: label} + + @classmethod + def from_dict(cls, data_dict: Dict[str, Any]) -> DecryptedKeyringData: + return cls( + { + service: { + user: Key.parse(key, data_dict.get("metadata", {}).get(service, {}).get(user)) + for user, key in users.items() + } + for service, users in data_dict.get("keys", {}).items() + }, + data_dict.get("labels", {}), + ) + + def to_dict(self) -> Dict[str, Any]: + return { + "keys": { + service: {user: key.to_data() for user, key in users.items()} + for service, users in self.services.items() + }, + "labels": self.labels, + "metadata": { + service: {user: key.metadata for user, key in users.items() if key.metadata is not None} + for service, users in self.services.items() + }, + } + + @final @dataclass class FileKeyring(FileSystemEventHandler): @@ -186,7 +240,7 @@ class FileKeyring(FileSystemEventHandler): load_keyring_lock: threading.RLock = field(default_factory=threading.RLock) # Guards access to needs_load_keyring needs_load_keyring: bool = False # Cache of the decrypted YAML contained in keyring.data - cached_data_dict: Dict[str, Any] = field(default_factory=default_file_keyring_data) + cached_data_dict: DecryptedKeyringData = field(default_factory=default_file_keyring_data) keyring_last_mod_time: Optional[float] = None # Key/value pairs to set on the outer payload on the next write file_content_properties_for_next_write: Dict[str, Any] = field(default_factory=dict) @@ -260,21 +314,19 @@ def has_content(self) -> bool: """ return not self.cached_file_content.empty() - def cached_keys(self) -> Dict[str, Dict[str, str]]: + def cached_keys(self) -> Services: """ Returns keyring.data.keys """ - keys_dict: Dict[str, Dict[str, str]] = self.cached_data_dict["keys"] - return keys_dict + return self.cached_data_dict.services def cached_labels(self) -> Dict[int, str]: """ Returns keyring.data.labels """ - labels_dict: Dict[int, str] = self.cached_data_dict["labels"] - return labels_dict + return self.cached_data_dict.labels - def get_password(self, service: str, user: str) -> Optional[str]: + def get_key(self, service: str, user: str) -> Optional[Key]: """ Returns the passphrase named by the 'user' parameter from the cached keyring data (does not force a read from disk) @@ -282,7 +334,7 @@ def get_password(self, service: str, user: str) -> Optional[str]: with self.lock_and_reload_if_required(): return self.cached_keys().get(service, {}).get(user) - def set_password(self, service: str, user: str, passphrase: str) -> None: + def set_key(self, service: str, user: str, key: Key) -> None: """ Store the passphrase to the keyring data using the name specified by the 'user' parameter. Will force a write to keyring.yaml on success. 
@@ -292,10 +344,10 @@ def set_password(self, service: str, user: str, passphrase: str) -> None: # Ensure a dictionary exists for the 'service' if keys.get(service) is None: keys[service] = {} - keys[service][user] = passphrase + keys[service][user] = key self.write_keyring() - def delete_password(self, service: str, user: str) -> None: + def delete_key(self, service: str, user: str) -> None: """ Deletes the passphrase named by the 'user' parameter from the keyring data (will force a write to keyring.yaml on success) @@ -378,7 +430,9 @@ def load_keyring(self, passphrase: Optional[str] = None) -> None: # TODO, this prompts for the passphrase interactively, move this out passphrase = obtain_current_passphrase(use_passphrase_cache=True) - self.cached_data_dict.update(self.cached_file_content.get_decrypted_data_dict(passphrase)) + self.cached_data_dict = DecryptedKeyringData.from_dict( + self.cached_file_content.get_decrypted_data_dict(passphrase) + ) def write_keyring(self, fresh_salt: bool = False) -> None: from chia.cmds.passphrase_funcs import obtain_current_passphrase diff --git a/chia/util/keychain.py b/chia/util/keychain.py index fcf7918ffc80..ee5f13ed8767 100644 --- a/chia/util/keychain.py +++ b/chia/util/keychain.py @@ -24,6 +24,7 @@ KeychainSecretsMissing, KeychainUserNotFound, ) +from chia.util.file_keyring import Key from chia.util.hash import std_hash from chia.util.ints import uint32 from chia.util.keyring_wrapper import KeyringWrapper @@ -309,10 +310,10 @@ def _get_key_data(self, index: int, include_secrets: bool = True) -> KeyData: is represented by the class `KeyData`. """ user = get_private_key_user(self.user, index) - read_str = self.keyring_wrapper.get_passphrase(self.service, user) - if read_str is None or len(read_str) == 0: + key = self.keyring_wrapper.keyring.get_key(self.service, user) + if key is None or len(key.secret) == 0: raise KeychainUserNotFound(self.service, user) - str_bytes = bytes.fromhex(read_str) + str_bytes = key.secret public_key = G1Element.from_bytes(str_bytes[: G1Element.SIZE]) fingerprint = public_key.get_fingerprint() @@ -324,7 +325,7 @@ def _get_key_data(self, index: int, include_secrets: bool = True) -> KeyData: return KeyData( fingerprint=uint32(fingerprint), public_key=public_key, - label=self.keyring_wrapper.get_label(fingerprint), + label=self.keyring_wrapper.keyring.get_label(fingerprint), secrets=KeyDataSecrets.from_entropy(entropy) if include_secrets and entropy is not None else None, ) @@ -370,7 +371,7 @@ def add_key( key = AugSchemeMPL.key_gen(seed) assert isinstance(key, PrivateKey) pk = key.get_g1() - key_data = bytes(pk).hex() + entropy.hex() + key_data = Key(bytes(pk) + entropy) fingerprint = pk.get_fingerprint() else: index = self._get_free_private_key_index() @@ -382,7 +383,7 @@ def add_key( pk_bytes = hexstr_to_bytes(mnemonic_or_pk) key = G1Element.from_bytes(pk_bytes) assert isinstance(key, G1Element) - key_data = pk_bytes.hex() + key_data = Key(pk_bytes) fingerprint = key.get_fingerprint() if fingerprint in [pk.get_fingerprint() for pk in self.get_all_public_keys()]: @@ -392,17 +393,17 @@ def add_key( # Try to set the label first, it may fail if the label is invalid or already exists. # This can probably just be moved into `FileKeyring.set_passphrase` after the legacy keyring stuff was dropped. 
if label is not None: - self.keyring_wrapper.set_label(fingerprint, label) + self.keyring_wrapper.keyring.set_label(fingerprint, label) try: - self.keyring_wrapper.set_passphrase( + self.keyring_wrapper.keyring.set_key( self.service, get_private_key_user(self.user, index), key_data, ) except Exception: if label is not None: - self.keyring_wrapper.delete_label(fingerprint) + self.keyring_wrapper.keyring.delete_label(fingerprint) raise return key @@ -412,13 +413,13 @@ def set_label(self, fingerprint: int, label: str) -> None: Assigns the given label to the first key with the given fingerprint. """ self.get_key(fingerprint) # raise if the fingerprint doesn't exist - self.keyring_wrapper.set_label(fingerprint, label) + self.keyring_wrapper.keyring.set_label(fingerprint, label) def delete_label(self, fingerprint: int) -> None: """ Removes the label assigned to the key with the given fingerprint. """ - self.keyring_wrapper.delete_label(fingerprint) + self.keyring_wrapper.keyring.delete_label(fingerprint) def get_first_private_key(self) -> Optional[Tuple[PrivateKey, bytes]]: """ @@ -515,12 +516,12 @@ def delete_key_by_fingerprint(self, fingerprint: int) -> int: key_data = self._get_key_data(index, include_secrets=False) if key_data.fingerprint == fingerprint: try: - self.keyring_wrapper.delete_label(key_data.fingerprint) + self.keyring_wrapper.keyring.delete_label(key_data.fingerprint) except (KeychainException, NotImplementedError): # Just try to delete the label and move on if there wasn't one pass try: - self.keyring_wrapper.delete_passphrase(self.service, get_private_key_user(self.user, index)) + self.keyring_wrapper.keyring.delete_key(self.service, get_private_key_user(self.user, index)) removed += 1 except Exception: pass diff --git a/chia/util/keyring_wrapper.py b/chia/util/keyring_wrapper.py index 230c00ad1157..7187811daa4b 100644 --- a/chia/util/keyring_wrapper.py +++ b/chia/util/keyring_wrapper.py @@ -264,23 +264,3 @@ def get_master_passphrase_from_credential_store(self) -> Optional[str]: def get_master_passphrase_hint(self) -> Optional[str]: return self.keyring.get_passphrase_hint() - - # Keyring interface - - def get_passphrase(self, service: str, user: str) -> Optional[str]: - return self.get_keyring().get_password(service, user) - - def set_passphrase(self, service: str, user: str, passphrase: str) -> None: - self.get_keyring().set_password(service, user, passphrase) - - def delete_passphrase(self, service: str, user: str) -> None: - self.get_keyring().delete_password(service, user) - - def get_label(self, fingerprint: int) -> Optional[str]: - return self.keyring.get_label(fingerprint) - - def set_label(self, fingerprint: int, label: str) -> None: - self.keyring.set_label(fingerprint, label) - - def delete_label(self, fingerprint: int) -> None: - self.keyring.delete_label(fingerprint) From 6ae45b70369eae3ab23391867846bb84d010a30b Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Wed, 10 Jul 2024 08:45:45 -0700 Subject: [PATCH 51/77] Update changelog for 2.4.2 (#18284) Add 2.4.2 changelog --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6604ceb0cb13..77a7e518b9d8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) for setuptools_scm/PEP 440 reasons. 
+## 2.4.2 Chia blockchain 2024-07-10 +## What's Changed +### Fixed +* Fix 12-word mnemonic support in keychain (Fixes #18243) +* Fix backwards compatibility for daemon RPC `add_private_key` + +### Deprecated +macOS 12 (Monterey) is deprecated. This release (2.4.2) will be the last release to support macOS 12 + ## 2.4.1 Chia blockchain 2024-06-25 ## What's Changed From 6d9219b0c461fbd5fbc1a70df779d682c4eb1809 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 10 Jul 2024 11:47:11 -0400 Subject: [PATCH 52/77] CHIA-597: Use `poetry` for package management (#11057) * preliminary setup with poetry * don't worry about locking the poetry env for the exploration right now * recover black config in pyproject.toml * install.sh for linting workflow * pull chiavdf version from poetry in install-timelord.sh * activate etc in upload and lint workflow * debug poetry show output on macos * update to clvm-tools 0.4.4 * add back setproctitle * catchup deps * lock * allow 3.10 * debug * make Install.ps1 use poetry as well * -ErrorAction Ignore * more todo about python version selection * make install-timelord.sh modifications more posix compliant * gitignore .penv* and .venv* as well * remove requirements.penv.txt * get artifact check requirements from poetry * fix .gitignore for venv symlink * separate out poetry setup * : * comment out code to be worked on * setup poetry for dependency check * ignore new poetry symlink * chia_rs==0.1.4 * a few updates * some missed updates `:[` * update some types libs * fix extras * activate * just use `python` somewhere * for windows * less symlinking for windows * oops * s * more * # shellcheck disable=SC2086 * activate action * more python version * lock up * include ipython (for python >=3.8) * add miniupnpc extra * add poetry-dynamic-versioning * type it out * better match dev versioning * catch up and delete setup.py again * lock * add pytest-cov * .penv/bin/poetry * .venv for windows activation * update pytest-xdist * install upnp extra * upnp * quote * maybe update some deps * mandatory and string * oops * slighty tidy * debug pip list * debug * pybind11 maybe instead? * remove pip install debugging * debug without upnp nor pypi.chia.net * chiavdf==1.0.8 * bring back upnp * re-enable upnp * diagnostic exception reraise * .exe? * diagnostics * .cmd...? 
* light tidy * more cleanup * make dependency check script not cwd dependent for finding the poetry env * correct sh substitution * lock * coverage 7 * todo cleanup * poetry update * interesting * windows too * support non-editable install via scripts again * oops * again * sheesh * fixup * more * more * Update dependencies * various pre-commit linter updates * add pyupgrade to extras list * use priority=supplemental instead of secondary * Add in some missing deps * update setuptools and pyinstaller to match main * venv->.venv * lint fixes * update a few packages for 3.12 support * get zstd from chia resource for python 3.12 * Adjustments to dynamic versioning * experiments with versioning * test fixes Signed-off-by: Earle Lowe * fixup last commit * include upnp as extra when installing on windows * include mozilla-ca as a package for the cacert.pem file * some windows tweaks * use SPECPATH as anchor path in pyinstaller.spec file * Add lxml as dev dependency * Fixup typo in lxml version * Update some packages to resolve GH vuln reports * Update more packages to resolve GH vulns * Ignore pylint license in depedency checker * match installation of the dynamic version plugin on windows * Fix issues with setting windows environment and use pip to install plugin per poetry docs recommendation * properly apply non-editable option with -i * use -f with ln to remove existing file as needed * use shell=true for subprocess to help windows run chia if it's a .cmd * Update .coveragerc * use shutil.which() to find chia executable in a cross platform way * mypy fixes * Address conflicts for numpy based on python version * Update lock file * Remove troublesome reference to pypi source * Add TODOs for manual install of poetry-dynamic-versioning plugin * Use poetry dynamic versioning to figure out the version rather than setuptools_scm * Update .github/workflows/reflow-version.yml Co-authored-by: Kyle Altendorf * Update .github/workflows/test-single.yml Co-authored-by: Kyle Altendorf * Update urllib3 * Attempts to get dynamic versioning to output something semver * more versioning finangling * lint pyproject.toml * Update certifi --------- Signed-off-by: Earle Lowe Co-authored-by: Earle Lowe Co-authored-by: Earle Lowe <30607889+emlowe@users.noreply.github.com> --- .coveragerc | 2 +- .../workflows/check_wheel_availability.yaml | 10 + .github/workflows/dependency-review.yml | 1 + .github/workflows/reflow-version.yml | 6 +- .github/workflows/test-single.yml | 2 +- .github/workflows/upload-pypi-source.yml | 8 +- .gitignore | 3 + Install.ps1 | 40 +- Setup-poetry.ps1 | 12 + activated.ps1 | 2 +- activated.sh | 2 +- build_scripts/check_dependency_artifacts.py | 47 +- build_scripts/installer-version.py | 65 - build_scripts/pyinstaller.spec | 2 +- chia/_tests/core/data_layer/util.py | 11 +- chia/_tests/util/test_installed.py | 2 +- install-timelord.sh | 9 +- install.sh | 46 +- installhelper.py | 6 +- poetry.lock | 3597 +++++++++++++++++ poetry.toml | 2 + pyproject.toml | 155 +- setup-poetry.sh | 36 + setup.py | 140 - 24 files changed, 3921 insertions(+), 285 deletions(-) create mode 100644 Setup-poetry.ps1 delete mode 100644 build_scripts/installer-version.py create mode 100644 poetry.lock create mode 100644 poetry.toml create mode 100755 setup-poetry.sh delete mode 100644 setup.py diff --git a/.coveragerc b/.coveragerc index ec4426c53cc4..c20cff58d24b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -13,7 +13,7 @@ parallel=True [paths] source = chia/ - venv/**/site-packages/chia/ + .venv/**/site-packages/chia/ 
[report] precision = 1 diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 04f85d37430f..7b0af2e0971d 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -67,5 +67,15 @@ jobs: python-version: ${{ matrix.python-version }} force-pyenv: ${{ matrix.os.matrix == 'macos' && matrix.arch.matrix == 'intel' }} + - name: Setup Poetry + if: matrix.os.matrix != 'windows' + run: | + sh setup-poetry.sh ${{ matrix.python-version }} + + - name: Setup Poetry + if: matrix.os.matrix == 'windows' + run: | + ./Setup-poetry.ps1 ${{ matrix.python-version }} + - name: Check Wheel Availability run: python build_scripts/check_dependency_artifacts.py diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 3bc3e1a7768c..500c8abce12c 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -21,4 +21,5 @@ jobs: - name: "Dependency Review" uses: actions/dependency-review-action@v4 with: + allow-dependencies-licenses: pkg:pypi/pylint deny-licenses: AGPL-1.0-only, AGPL-1.0-or-later, AGPL-1.0-or-later, AGPL-3.0-or-later, GPL-1.0-only, GPL-1.0-or-later, GPL-2.0-only, GPL-2.0-or-later, GPL-3.0-only, GPL-3.0-or-later diff --git a/.github/workflows/reflow-version.yml b/.github/workflows/reflow-version.yml index cd1c9ce5efef..6339fc28eba8 100644 --- a/.github/workflows/reflow-version.yml +++ b/.github/workflows/reflow-version.yml @@ -40,9 +40,11 @@ jobs: - name: Create installer version number id: version-number + # TODO figure out better way to handle versioning run: | python3 -m venv ../venv . ../venv/bin/activate - pip install setuptools_scm - echo "chia-installer-version=$(python3 ./build_scripts/installer-version.py)" >> "$GITHUB_OUTPUT" + python -m pip install --upgrade pip + pip install poetry "poetry-dynamic-versioning[plugin]" + echo "chia-installer-version=$(poetry version -s)" >> "$GITHUB_OUTPUT" deactivate diff --git a/.github/workflows/test-single.yml b/.github/workflows/test-single.yml index a05b5b70b794..6b88f586000a 100644 --- a/.github/workflows/test-single.yml +++ b/.github/workflows/test-single.yml @@ -228,7 +228,7 @@ jobs: echo '==== collecting installed tests ====' if pytest --collect-only -qq --import-mode=append --pyargs chia._tests > installed_tests_raw then - cat installed_tests_raw | sed -n 's;^venv/.*/\(chia/_tests/.*\);\1;p' | sort > installed_tests + cat installed_tests_raw | sed -n 's;^\.venv/.*/\(chia/_tests/.*\);\1;p' | sort > installed_tests else echo ' ==== installed test collection failure' cat installed_tests_raw diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 97375979742c..5275e89dc489 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -165,11 +165,16 @@ jobs: upload_source_dist: name: Lint and Upload source distribution - runs-on: [ubuntu-latest] + runs-on: ${{ matrix.os }} timeout-minutes: 30 needs: - mypy - check + strategy: + matrix: + python-version: [3.8] + os: [ubuntu-latest] + steps: - name: Add safe git directory uses: chia-network/actions/git-mark-workspace-safe@main @@ -194,6 +199,7 @@ jobs: - name: Build source distribution run: | + # TODO: let poetry do this? python -m build --sdist --outdir dist . 
- name: Upload artifacts diff --git a/.gitignore b/.gitignore index a3e1ca9ab9f9..4793b2a8afee 100644 --- a/.gitignore +++ b/.gitignore @@ -41,7 +41,10 @@ build_scripts/build # pyenv .python-version +.penv*/ +.venv*/ venv*/ +venv* activate # Editors diff --git a/Install.ps1 b/Install.ps1 index 40b491bc829b..543bb34f1452 100644 --- a/Install.ps1 +++ b/Install.ps1 @@ -10,17 +10,12 @@ param( $ErrorActionPreference = "Stop" $extras = @() +$extras += "upnp" if ($d) { $extras += "dev" } -$pip_parameters = @() -if (-not $i) -{ - $pip_parameters += "--editable" -} - if ([Environment]::Is64BitOperatingSystem -eq $false) { Write-Output "Chia requires a 64-bit Windows installation" @@ -100,31 +95,36 @@ if ($openSSLVersion -lt 269488367) Write-Output "Anything before 1.1.1n is vulnerable to CVE-2022-0778." } -if ($extras.length -gt 0) +$extras_cli = @() +foreach ($extra in $extras) { - $extras_cli = $extras -join "," - $pip_parameters += ".[$extras_cli]" -} -else -{ - $pip_parameters += "." + $extras_cli += "--extras" + $extras_cli += $extra } -py -$pythonVersion -m venv venv +./Setup-poetry.ps1 -pythonVersion "$pythonVersion" +.penv/Scripts/poetry env use $(py -"$pythonVersion" -c 'import sys; print(sys.executable)') +# TODO: Decide if this is needed or should be handled automatically in some way +.penv/Scripts/pip install "poetry-dynamic-versioning[plugin]" +.penv/Scripts/poetry install @extras_cli -venv\scripts\python -m pip install --upgrade pip setuptools wheel -venv\scripts\pip install --extra-index-url https://pypi.chia.net/simple/ miniupnpc==2.2.2 -& venv\scripts\pip install @pip_parameters --extra-index-url https://pypi.chia.net/simple/ +if ($i) +{ + Write-Output "Running 'pip install --no-deps .' for non-editable" + .venv/Scripts/python -m pip install --no-deps . +} if ($p) { $PREV_VIRTUAL_ENV = "$env:VIRTUAL_ENV" - $env:VIRTUAL_ENV = "venv" + $env:VIRTUAL_ENV = ".venv" .\Install-plotter.ps1 bladebit .\Install-plotter.ps1 madmax $env:VIRTUAL_ENV = "$PREV_VIRTUAL_ENV" } +cmd /c mklink /j venv .venv + Write-Output "" Write-Output "Chia blockchain .\Install.ps1 complete." Write-Output "For assistance join us on Discord in the #support chat channel:" @@ -133,6 +133,6 @@ Write-Output "" Write-Output "Try the Quick Start Guide to running chia-blockchain:" Write-Output "https://github.com/Chia-Network/chia-blockchain/wiki/Quick-Start-Guide" Write-Output "" -Write-Output "To install the GUI run '.\venv\scripts\Activate.ps1' then '.\Install-gui.ps1'." +Write-Output "To install the GUI run '.\.venv\scripts\Activate.ps1' then '.\Install-gui.ps1'." Write-Output "" -Write-Output "Type '.\venv\Scripts\Activate.ps1' and then 'chia init' to begin." +Write-Output "Type '.\.venv\Scripts\Activate.ps1' and then 'chia init' to begin." diff --git a/Setup-poetry.ps1 b/Setup-poetry.ps1 new file mode 100644 index 000000000000..9ddd8d03e293 --- /dev/null +++ b/Setup-poetry.ps1 @@ -0,0 +1,12 @@ +param( + [Parameter(Mandatory, HelpMessage="Python version")] + [string] + $pythonVersion +) + +$ErrorActionPreference = "Stop" + +py -$pythonVersion -m venv .penv +.penv/Scripts/python -m pip install --upgrade pip setuptools wheel +# TODO: maybe make our own zipapp/shiv/pex of poetry and download that? 
+.penv/Scripts/python -m pip install poetry diff --git a/activated.ps1 b/activated.ps1 index eb63e32c9a11..71aad646eb54 100644 --- a/activated.ps1 +++ b/activated.ps1 @@ -6,7 +6,7 @@ $command = $args[0] $parameters = [System.Collections.ArrayList]$args $parameters.RemoveAt(0) -& $script_directory/venv/Scripts/Activate.ps1 +& $script_directory/.venv/Scripts/Activate.ps1 & $command @parameters exit $LASTEXITCODE diff --git a/activated.sh b/activated.sh index f57ba21154ec..719edf662e50 100755 --- a/activated.sh +++ b/activated.sh @@ -7,6 +7,6 @@ SCRIPT_DIRECTORY=$( pwd ) # shellcheck disable=SC1091 -. "${SCRIPT_DIRECTORY}/venv/bin/activate" +. "${SCRIPT_DIRECTORY}/.venv/bin/activate" "$@" diff --git a/build_scripts/check_dependency_artifacts.py b/build_scripts/check_dependency_artifacts.py index f3cb71f7e52b..a751e09be9de 100644 --- a/build_scripts/check_dependency_artifacts.py +++ b/build_scripts/check_dependency_artifacts.py @@ -16,6 +16,10 @@ } +here = pathlib.Path(__file__).parent +project_root = here.parent + + def excepted(path: pathlib.Path) -> bool: # TODO: This should be implemented with a real file name parser though i'm # uncertain at the moment what package that would be. @@ -29,14 +33,10 @@ def main() -> int: print(f"Working in: {directory_string}") print() directory_path = pathlib.Path(directory_string) + artifact_directory_path = directory_path.joinpath("artifacts") + artifact_directory_path.mkdir() extras = ["upnp"] - package_path_string = os.fspath(pathlib.Path(__file__).parent.parent) - - if len(extras) > 0: - package_and_extras = f"{package_path_string}[{','.join(extras)}]" - else: - package_and_extras = package_path_string print("Downloading packages for Python version:") lines = [ @@ -49,6 +49,33 @@ def main() -> int: print(f" {line}") print(flush=True) + requirements_path = directory_path.joinpath("exported_requirements.txt") + + if sys.platform == "win32": + poetry_path = pathlib.Path(".penv/Scripts/poetry") + else: + poetry_path = pathlib.Path(".penv/bin/poetry") + + poetry_path = project_root.joinpath(poetry_path) + + subprocess.run( + [ + os.fspath(poetry_path), + "export", + "--format", + "requirements.txt", + "--output", + os.fspath(requirements_path), + "--without-hashes", + "--no-ansi", + "--no-interaction", + *(f"--extras={extra}" for extra in extras), + ], + check=True, + ) + + env = {key: value for key, value in os.environ.items() if key != "PIP_REQUIRE_VIRTUALENV"} + subprocess.run( [ sys.executable, @@ -56,17 +83,19 @@ def main() -> int: "pip", "download", "--dest", - os.fspath(directory_path), + os.fspath(artifact_directory_path), "--extra-index", "https://pypi.chia.net/simple/", - package_and_extras, + "--requirement", + os.fspath(requirements_path), ], + env=env, check=True, ) failed_artifacts = [] - for artifact in directory_path.iterdir(): + for artifact in artifact_directory_path.iterdir(): if artifact.suffix == ".whl": # everything being a wheel is the target continue diff --git a/build_scripts/installer-version.py b/build_scripts/installer-version.py deleted file mode 100644 index cdd09e072887..000000000000 --- a/build_scripts/installer-version.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -from setuptools_scm import get_version - - -# example: 1.0b5.dev225 -def main() -> None: - scm_full_version = get_version(root="..", relative_to=__file__) - # scm_full_version = "1.0.5.dev22" - - left_full_version = scm_full_version.split("+") - version = left_full_version[0].split(".") - scm_major_version = version[0] - scm_minor_version 
= version[1] - - if len(version) == 3: # If the length of the version array is more than 2 - patch_release_number = version[2] - scm_patch_version = patch_release_number - dev_release_number = "" - elif len(version) == 4: - scm_patch_version = version[2] - dev_release_number = "-" + version[3] - else: - scm_patch_version = "" - dev_release_number = "" - - major_release_number = scm_major_version - minor_release_number = scm_minor_version - - # If this is a beta dev release, get which beta it is - if "0b" in scm_minor_version: - orignial_minor_ver_list = scm_minor_version.split("0b") - major_release_number = str(1 - int(scm_major_version)) # decrement the major release for beta - minor_release_number = scm_major_version - patch_release_number = orignial_minor_ver_list[1] - if scm_patch_version and "dev" in scm_patch_version: - dev_release_number = "." + scm_patch_version - elif "0rc" in version[1]: - original_minor_ver_list = scm_minor_version.split("0rc") - major_release_number = str(1 - int(scm_major_version)) # decrement the major release for release candidate - minor_release_number = str(int(scm_major_version) + 1) # RC is 0.2.1 for RC 1 - patch_release_number = original_minor_ver_list[1] - if scm_patch_version and "dev" in scm_patch_version: - dev_release_number = "." + scm_patch_version - elif len(version) == 2: - patch_release_number = "0" - elif len(version) == 4: # for 1.0.5.dev2 - patch_release_number = scm_patch_version - else: - major_release_number = scm_major_version - minor_release_number = scm_minor_version - patch_release_number = scm_patch_version - dev_release_number = "" - - install_release_number = major_release_number + "." + minor_release_number - if len(patch_release_number) > 0: - install_release_number += "." + patch_release_number - if len(dev_release_number) > 0: - install_release_number += dev_release_number - - print(str(install_release_number)) - - -if __name__ == "__main__": - main() diff --git a/build_scripts/pyinstaller.spec b/build_scripts/pyinstaller.spec index 43693057a5d2..69c3f6ba07ff 100644 --- a/build_scripts/pyinstaller.spec +++ b/build_scripts/pyinstaller.spec @@ -10,7 +10,7 @@ from PyInstaller.utils.hooks import collect_submodules, copy_metadata THIS_IS_WINDOWS = platform.system().lower().startswith("win") THIS_IS_MAC = platform.system().lower().startswith("darwin") -ROOT = pathlib.Path(importlib.import_module("chia").__file__).absolute().parent.parent +ROOT = pathlib.Path(SPECPATH).absolute().parent keyring_imports = collect_submodules("keyring.backends") diff --git a/chia/_tests/core/data_layer/util.py b/chia/_tests/core/data_layer/util.py index 0532582bd946..472ceb65de0a 100644 --- a/chia/_tests/core/data_layer/util.py +++ b/chia/_tests/core/data_layer/util.py @@ -4,6 +4,7 @@ import functools import os import pathlib +import shutil import subprocess from dataclasses import dataclass from typing import IO, TYPE_CHECKING, Any, Dict, Iterator, List, Literal, Optional, Union, overload @@ -137,8 +138,11 @@ def run( if "SYSTEMROOT" in os.environ: kwargs["env"]["SYSTEMROOT"] = os.environ["SYSTEMROOT"] + chia_executable = shutil.which("chia") + if chia_executable is None: + chia_executable = "chia" modified_args: List[Union[str, os_PathLike_str]] = [ - self.scripts_path.joinpath("chia"), + self.scripts_path.joinpath(chia_executable), "--root-path", self.path, *args, @@ -151,7 +155,10 @@ def run( kwargs["stdout"] = stdout kwargs["stderr"] = stderr - return subprocess.run(*final_args, **kwargs) + try: + return subprocess.run(*final_args, **kwargs) + 
except OSError as e: + raise Exception(f"failed to run:\n {final_args}\n {kwargs}") from e def read_log(self) -> str: return self.path.joinpath("log", "debug.log").read_text(encoding="utf-8") diff --git a/chia/_tests/util/test_installed.py b/chia/_tests/util/test_installed.py index e85a027b1b68..7f361d43330f 100644 --- a/chia/_tests/util/test_installed.py +++ b/chia/_tests/util/test_installed.py @@ -17,4 +17,4 @@ def test_chia_installed() -> None: and testing the installed code checks for that. A next step would be to install using the actual wheel file we are going to publish. """ - assert "venv" in pathlib.Path(chia.__file__).parts + assert ".venv" in pathlib.Path(chia.__file__).parts diff --git a/install-timelord.sh b/install-timelord.sh index a6364a35a2fc..c3c476c646be 100755 --- a/install-timelord.sh +++ b/install-timelord.sh @@ -50,7 +50,14 @@ fi export BUILD_VDF_BENCH=Y # Installs the useful vdf_bench test of CPU squaring speed THE_PATH=$(python -c 'import pathlib, chiavdf, importlib_resources; print(pathlib.Path(chiavdf.__file__).parent)')/vdf_client -CHIAVDF_VERSION=$(python -c 'import os; os.environ["CHIA_SKIP_SETUP"] = "1"; from setup import dependencies; t = [_ for _ in dependencies if _.startswith("chiavdf")][0]; print(t)') +# Note that this picks the version based on the requirement, not the presently +# installed chiavdf. +CHIAVDF_POETRY_INFO=$(.penv/bin/poetry show --no-ansi --no-interaction chiavdf) +echo "${CHIAVDF_POETRY_INFO}" +CHIAVDF_POETRY_INFO_VERSION=$(echo "${CHIAVDF_POETRY_INFO}" | grep 'version[[:space:]]*:' | sed 's/version[[:space:]]*: //') +echo "${CHIAVDF_POETRY_INFO_VERSION}" +CHIAVDF_VERSION="chiavdf==${CHIAVDF_POETRY_INFO_VERSION}" +echo "${CHIAVDF_VERSION}" ubuntu_cmake_install() { UBUNTU_PRE_2004=$(python -c 'import subprocess; id = subprocess.run(["lsb_release", "-is"], stdout=subprocess.PIPE); version = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(id.stdout.decode("ascii") == "Ubuntu\n" and float(version.stdout) < float(20.04))') diff --git a/install.sh b/install.sh index 06f68013a198..b589d65b3a1b 100755 --- a/install.sh +++ b/install.sh @@ -18,20 +18,20 @@ usage() { echo "${USAGE_TEXT}" } -EXTRAS= +EXTRAS='--extras upnp' PLOTTER_INSTALL= -EDITABLE='-e' +EDITABLE=1 while getopts adilpsh flag; do case "${flag}" in # automated a) : ;; # development - d) EXTRAS=${EXTRAS}dev, ;; + d) EXTRAS="${EXTRAS} --extras dev" ;; # non-editable - i) EDITABLE='' ;; + i) EDITABLE= ;; # legacy keyring - l) EXTRAS=${EXTRAS}legacy-keyring, ;; + l) EXTRAS="${EXTRAS} --extras legacy-keyring" ;; p) PLOTTER_INSTALL=1 ;; # simple install s) : ;; @@ -166,41 +166,23 @@ echo "OpenSSL version for Python is ${OPENSSL_VERSION_STRING}" if [ "$OPENSSL_VERSION_INT" -lt "269488367" ]; then echo "WARNING: OpenSSL versions before 3.0.2, 1.1.1n, or 1.0.2zd are vulnerable to CVE-2022-0778" echo "Your OS may have patched OpenSSL and not updated the version to 1.1.1n" - echo "We recommend updating to the latest version of OpenSSL available for your OS" fi -# If version of `python` and "$INSTALL_PYTHON_VERSION" does not match, clear old version -VENV_CLEAR="" -if [ -e venv/bin/python ]; then - VENV_PYTHON_VER=$(venv/bin/python -V) - TARGET_PYTHON_VER=$($INSTALL_PYTHON_PATH -V) - if [ "$VENV_PYTHON_VER" != "$TARGET_PYTHON_VER" ]; then - echo "existing python version in venv is $VENV_PYTHON_VER while target python version is $TARGET_PYTHON_VER" - echo "Refreshing venv modules..." 
- VENV_CLEAR="--clear" - fi -fi - -$INSTALL_PYTHON_PATH -m venv venv $VENV_CLEAR +./setup-poetry.sh -c "${INSTALL_PYTHON_PATH}" +.penv/bin/poetry env use "${INSTALL_PYTHON_PATH}" +# TODO: Decide if this is needed or should be handled automatically in some way +.penv/bin/pip install "poetry-dynamic-versioning[plugin]" +# shellcheck disable=SC2086 +.penv/bin/poetry install ${EXTRAS} +ln -s -f .venv venv if [ ! -f "activate" ]; then ln -s venv/bin/activate . fi -EXTRAS=${EXTRAS%,} -if [ -n "${EXTRAS}" ]; then - EXTRAS=[${EXTRAS}] +if [ -z "$EDITABLE" ]; then + .venv/bin/python -m pip install --no-deps . fi -# shellcheck disable=SC1091 -. ./activate -# pip 20.x+ supports Linux binary wheels -python -m pip install --upgrade pip -python -m pip install wheel -#if [ "$INSTALL_PYTHON_VERSION" = "3.8" ]; then -# This remains in case there is a diversion of binary wheels -python -m pip install --extra-index-url https://pypi.chia.net/simple/ miniupnpc==2.2.2 -python -m pip install ${EDITABLE} ."${EXTRAS}" --extra-index-url https://pypi.chia.net/simple/ - if [ -n "$PLOTTER_INSTALL" ]; then set +e PREV_VENV="$VIRTUAL_ENV" diff --git a/installhelper.py b/installhelper.py index 89d8f0fcce8a..51de3f90bcbf 100644 --- a/installhelper.py +++ b/installhelper.py @@ -8,6 +8,7 @@ import json import os +import shutil import subprocess from os.path import exists @@ -51,7 +52,10 @@ def make_semver(version_str: str) -> str: def get_chia_version() -> str: version: str = "0.0" - output = subprocess.run(["chia", "version"], capture_output=True) + chia_executable = shutil.which("chia") + if chia_executable is None: + chia_executable = "chia" + output = subprocess.run([chia_executable, "version"], capture_output=True) if output.returncode == 0: version = str(output.stdout.strip(), "utf-8").splitlines()[-1] return make_semver(version) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000000..c0225ee7a6f4 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,3597 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "23.2.1" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, + {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, +] + +[[package]] +name = "aiohttp" +version = "3.9.4" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, + {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, + {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, + {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, + {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, + {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, + {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, + {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, + {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, + {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, + {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, + {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, + {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, + {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, + {file = 
"aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, + {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, + {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, + {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, + {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, + {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, + {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, + {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, + {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, + {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, + {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, + {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, + {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, + {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiohttp-cors" +version = "0.7.0" +description = "CORS support for aiohttp" +optional = true +python-versions = "*" +files = [ + {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, + {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, +] + +[package.dependencies] +aiohttp = ">=1.1" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "aiosqlite" +version = "0.20.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, +] + +[package.dependencies] +typing_extensions = ">=4.0" + +[package.extras] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] + 
+[[package]] +name = "altgraph" +version = "0.17.4" +description = "Python graph (network) package" +optional = true +python-versions = "*" +files = [ + {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"}, + {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"}, +] + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = true +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = true +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "astroid" +version = "3.2.2" +description = "An abstract syntax tree for Python with inference support." 
+optional = true +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, + {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + +[[package]] +name = "bencode-py" +version = "4.0.0" +description = "Simple bencode parser (for Python 2, Python 3 and PyPy)" +optional = true +python-versions = "*" +files = [ + {file = "bencode.py-4.0.0-py2.py3-none-any.whl", hash = "sha256:99c06a55764e85ffe81622fdf9ee78bd737bad3ea61d119784a54bb28860d962"}, + {file = "bencode.py-4.0.0.tar.gz", hash = "sha256:2a24ccda1725a51a650893d0b63260138359eaa299bb6e7a09961350a2a6e05c"}, +] + +[[package]] +name = "bitarray" +version = "2.8.2" +description = "efficient arrays of booleans -- C extension" +optional = false +python-versions = "*" +files = [ + {file = "bitarray-2.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525eda30469522cd840a11ba866d0616c132f6c4be8966a297d7545e97fcb822"}, + {file = "bitarray-2.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3d9730341c825eb167ca06c9dddf6ad4d1b4e71ea7da73cc8c5139fcb5e14ca"}, + {file = "bitarray-2.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad8f8c39c8df184e346184699783f105755003662f0dbe1233d9d9849650ab5f"}, + {file = "bitarray-2.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2cb8d08330d250df47088c13683322083afbdfafdc31df205616506d6b9f068f"}, + {file = "bitarray-2.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56f19ccba8a6ddf1382b0fb4fb8d4e1330e4a1b148e5d198f0981ba2a97c3492"}, + {file = "bitarray-2.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4db2e0f58153a376d9a14873e342d507ca32640640284cddf3c1e74a65929477"}, + {file = "bitarray-2.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b3c27aeea1752f0c1df1e29115e4b6f0249173d71e53c5f7e2c821706f028b"}, + {file = "bitarray-2.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef23f62b3abd287cf368341540ef2a81c86b48de9d488e182e63fe24ac165538"}, + {file = "bitarray-2.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6d79fd3c58a4dc71ffd0fc55982a9a2079fe94c76ccff2777092f6107d6a049a"}, + {file = "bitarray-2.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8528c59d3d3df6618777892b60435022d8917de9ea32933d439c7ffd24437237"}, + {file = "bitarray-2.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c35bb5fe018fd9c42be3c28e74dc7dcfae471c3c6689679dbd0bd1d6dc0f51b7"}, + {file = "bitarray-2.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:232e8faa8e624f3eb0552a636ebe745cee00480e0e56ad62f17808d281838f2e"}, + {file = "bitarray-2.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:945e97ad2bbf7885426f39641a735a31fd4ca2e84e4d0cd271d9880372d6eae1"}, + {file = "bitarray-2.8.2-cp310-cp310-win32.whl", hash = "sha256:88c2d427ab1b20f220c1d53171b0691faa8f0a219367d84e859f1001e90ceefc"}, + {file = "bitarray-2.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7c5745e0f96c2c16c03c7540dbe26f3b62ddee63059be0a014156933f054024"}, + {file = "bitarray-2.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a610426251d1340baa4d8b7942d2cbfe6a1e20b92c66817ab582e0d341185ab5"}, + {file = "bitarray-2.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599b04b04eb1b5b964a35986bea2bc4381145836fe550cc33c40a796b855b985"}, + {file = "bitarray-2.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9014660472f2080d550157164cc5f9376245a34a0ab877b82b95c1f894af5b28"}, + {file = "bitarray-2.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:532d63c54159f7e0fb520e2f72ef596493bc43810eaa75fac7a188e898ab593b"}, + {file = "bitarray-2.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1563f11dd70cb1684cfe841e4cf7f35d4f65769de21d12b72cf773a7932615"}, + {file = "bitarray-2.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e456150af62ee1f24a0c9976947629bfb80d80b4fbd37aa901cf794db6ba9b0"}, + {file = "bitarray-2.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc29909e4cef05d5e49f5d77ace1dc49311c7791734a048b690521c76b4b7a0"}, + {file = "bitarray-2.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:608385f07a4b0391d4982d1efb83ad70920cd8ca495a7868e44d2a4511cbf84e"}, + {file = "bitarray-2.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2baf7ec353fa64917045b3efe26e7c12ce0d7b4d120c3773a612dce54f91585"}, + {file = "bitarray-2.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2c39d1cb04fc277701de6fe2119cc71facc4aff2ca0414b2e326aec337fa1ab4"}, + {file = "bitarray-2.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:3caf4ca668854bb23db4b65af0068238677b5791bcc45694bf8990f3e26e85c9"}, + {file = "bitarray-2.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4bbfe4474d3470c724e283bd1fe8ee9ab3cb6a4c378112926f45d41e326a7622"}, + {file = "bitarray-2.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb941981676dc7859d53199a10a33ca56a3146cce6a45bc6ad70572c1147157d"}, + {file = "bitarray-2.8.2-cp311-cp311-win32.whl", hash = "sha256:e8963d7ac292f41654fa7cbc1a34efdb09e5a42399b2e3689c3fd5b8b4e0fe16"}, + {file = "bitarray-2.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:ee779a291330287b341044635fce2979176d113b0dcce0308dc5d62da7951eec"}, + {file = "bitarray-2.8.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:05d84765bbfd0aa10890c765c56c917c237987325c4e327f3c0febbfc34365c8"}, + {file = "bitarray-2.8.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c7b7be4bff27d7ce6a81d3993755371b5f5b42436afa151868e8fd599acbab19"}, + {file = "bitarray-2.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c3d51ab9f3d5b9a10295abe480c50bf74ee5bf3d984c4cee77e493e575acc869"}, + {file = "bitarray-2.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00bad63ef6f9d22ba36b01b89167176a451ea22a916d1dfa77d73e0298f1d1f9"}, + {file = "bitarray-2.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:225e19d37b234d4d721557434b7d5590cd63b6342492b689e2d694d44d7cc537"}, + {file = "bitarray-2.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7e3ab9870c496e5a058436bf4d96ed111ca6154c8ef8147b70c44c188d6fb2c"}, + {file = "bitarray-2.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3e182c766cd6f302e99e0d8e44927d533356e9d6ac93fcd09987ebead467aa"}, + {file = "bitarray-2.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7bb559b68eb9cb3c4f867eb9fb39a696c4da70a41fad37b410bd0c7b426a8ce"}, + {file = "bitarray-2.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:97e658a3793478d6bca684f47f29f62542312683687bc045dc3cb588160e74b3"}, + {file = "bitarray-2.8.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:dd351b8fbc77c2e2ebc3eeadc0cf72bd5024a43bef5a847697e2b076d1201636"}, + {file = "bitarray-2.8.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:280809e56a7098f48165ce134222098e4cfe7084b10d69bbc31367942e541dfd"}, + {file = "bitarray-2.8.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14bc38ced7edffff25ee748c1eabc530624c9af68f86322b030b11b7918b966f"}, + {file = "bitarray-2.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:de4953b6b1e19dabd23767bd1f83f1cf73978372189dec0e2dd8b3d6971100d6"}, + {file = "bitarray-2.8.2-cp312-cp312-win32.whl", hash = "sha256:99196b4730d887a4bc578f05039b55dc57b131c81b5a5e03efa619b587bdf293"}, + {file = "bitarray-2.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:215a5bf8fdcbed700cc8782d4044e1f036606d5c321710d83e8da6d0fdfe07d5"}, + {file = "bitarray-2.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9c54136c9fab2cefe9801e336b8a3aa7299bcfe7f387379cc6394ad1d5a484b"}, + {file = "bitarray-2.8.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08ad70c1555d9622cecd8f1b132a5341d183a9161aba93cc9739bbaabe4220b0"}, + {file = "bitarray-2.8.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:384be6b7df8fb6a93ddd88d4184094f2ba4f1d07c30dcd4ae164d185d31a2af6"}, + {file = "bitarray-2.8.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:bd2a098250c683d248a6490ac437ed56f7164d2151572231bd26c76bfe111b11"}, + {file = "bitarray-2.8.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6ae5c18b9a70cb0ae576a8a3c8a9a0659356c016b49cc6b263dd987d344f30d"}, + {file = "bitarray-2.8.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:188f5780f1cfbeba0c3ddb1aa3fa0415ab1a8aa04e9e89f70ad5403197013437"}, + {file = "bitarray-2.8.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5f2a96c5b40727bc21a695d3a106f49e88572fa11427bf2193cabd99e624c901"}, + {file = "bitarray-2.8.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b6df948da34b5fb949698092573d798c76c54f2f2188db59276d599075f9ed04"}, + {file = "bitarray-2.8.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f00c328b8dae1828844bac019dfe425d10a2043cc70e2f967224c5392d19ad"}, + {file = "bitarray-2.8.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:7965108069f9731306a882872c23ad4f5a8531668e82b27932a19814c52a8dd8"}, + {file = "bitarray-2.8.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:420aa610fe392c4ee700e474673276bb4f3c4f091d001f58b1f018bf650840c1"}, + {file = "bitarray-2.8.2-cp36-cp36m-win32.whl", hash = "sha256:b85929db81105c06e8292c05cac093068e86464555c628c03f99c9f8090d68d4"}, + {file = "bitarray-2.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:cba09dfd3aea2addc994eb21a861c3cea2d68141bb7ebe68b0e94c73405540f9"}, + {file = "bitarray-2.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:172169099797f1ec469b0aadb00c653193a74757f99312c9c17dc1a18d23d972"}, + {file = "bitarray-2.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351a4fed240728dcc96966e0c4cfd3dce870525377a1cb5afac8e5cfe116ff7b"}, + {file = "bitarray-2.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff31bef13fd278446b6d1969a46db9f02c36fd905f3e75878f0fe17271f7d897"}, + {file = "bitarray-2.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb8b727cd9ddff848c5f73e65470abb110f026beab403bcebbd74e7439b9bd8f"}, + {file = "bitarray-2.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1356c86eefbde3fe8a3c39fb81bbc8b16acc8e442e191408042e8b1d6904e3"}, + {file = "bitarray-2.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7706336bd15acf4e42300579e42bef742c01a4eb202998f6c20c443a2ce5fd60"}, + {file = "bitarray-2.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a4b43949477dc2b0d3e1d8b7c413ed74f515cef01954cdcc3fb1e2dcc49f2aff"}, + {file = "bitarray-2.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:06d9de5db244c6e45a5318713367765de0a57d82ad616869a004a710a95541e9"}, + {file = "bitarray-2.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5569c8314335e92570c471d60b4b03eb2a4467864805a560d133d24b27b3961a"}, + {file = "bitarray-2.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:76a4faef4c31953aa7b9ebe00d162f7ce9bc03fc8d423ab2dc690a11d7520a8e"}, + {file = "bitarray-2.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1474db8c4297026e1daa1699e70e25e56dff91104fe025b1a9804332f2737604"}, + {file = "bitarray-2.8.2-cp37-cp37m-win32.whl", hash = "sha256:85b504f233f0484e9a74df4f286a9ae56fbbe2a648c45726761cf7b6f072cdc8"}, + {file = "bitarray-2.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3dde123ce85d1ba99d9bdf44b1b3174fa22bc8fb10004e0d72bb661a0444c1a9"}, + {file = "bitarray-2.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:23fae6a5a1403d16592b8823d5dea93f738c6e217a1e1bb0eefad242fb03d47f"}, + {file = "bitarray-2.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c44b3022115eb1697315bc51aeadbade1a19d7188bcda66c52d91209cf2963ca"}, + {file = "bitarray-2.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fea9354b7169810e2bdd6f3265ff128b564a25d38479b9ad0a9c5776e4fd0cfc"}, + {file = "bitarray-2.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f699bf2cb223aeec04a106003bd2bf8a4fc6d4c5eddf79cacecb6b267657ac5"}, + {file = "bitarray-2.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:462c9425fbc5315cbc20a72ca62558e5545bb0f6dc9355e2fa96fa747e9b1a80"}, + {file = "bitarray-2.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c8716b4c45fb128cd4da143749e276f150ecb0acb711f4969d7e7ebc9b2a675"}, + {file = "bitarray-2.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79fde5b27e35aedd958f5fb58ebabce47d7eddae5a5e3774088c30c9610195ef"}, + {file = "bitarray-2.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abf2593b91e36f1cb1c40ac895993c7d2eb30d3f1cb0954a80e5f13697b6b69"}, + {file = "bitarray-2.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ab2e03dd140ab93b91f94a785d1cd6082d5ab53ab6ec958726efa0ad17f7b87a"}, + {file = "bitarray-2.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9e895cc3e5ffee269dd9866097e227a68022ef2b78d627a6ed737534d0c88c14"}, + {file = "bitarray-2.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0bbeb7120ec1a9b26ce423e74cad7b414cea9e35f8e05599e3b3dceb87f4d1b6"}, + {file = "bitarray-2.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:51d45d56be14b69720d11a8c61e101d86a65dc8a3a9f356bbe4d98cf4f3c5617"}, + {file = "bitarray-2.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:726a598e34657772e5f131115741ea8709e9b55fa35d63c4717bc16b2a737d38"}, + {file = "bitarray-2.8.2-cp38-cp38-win32.whl", hash = "sha256:ab87c4c50d65932788d058adbbd28a209144523ffacbab81dd41582ffce26af9"}, + {file = "bitarray-2.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:316147fb62c810a7667277e5ae7bb75b2871c32d2c398aeb4503cbd4cf3315e7"}, + {file = "bitarray-2.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:36bdde1aba78e4a3a6ce5cbebd0a6bc967b0c3fbd8bd99a197dcc17d654f423c"}, + {file = "bitarray-2.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:932f7b77750dff7140522dc97dfd94533a599ef1c5d0be3733f556fd44a68821"}, + {file = "bitarray-2.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5819b95d0ccce864066f062d2329363ae8a64b9c3d076d039c75ffc9204c2a12"}, + {file = "bitarray-2.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c28b52e59a5e6aa00a929b35b04473bd479a74237ab1170c573c49e8aca61fe"}, + {file = "bitarray-2.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecdd528268478efeb78ed0132b01104bda6cd8f10c8a57708fc87b1add77e4d"}, + {file = "bitarray-2.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f6f245d4a5e707d48274f38551b654a36db4fb83437c98be00d2019263aa364"}, + {file = "bitarray-2.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b088f06d9e2f523683ae363e227173ac454dbb56c938c6d42791fdd78bad8da7"}, + {file = "bitarray-2.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e883919cea8e446c5c49717a7ce5c93a016a02b9429b81d64b9ab1d80fc12e42"}, + {file = 
"bitarray-2.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:09d729420b8edc4d8a23a518ae4553074a0054d0441c1a461b425c2f033fab5e"}, + {file = "bitarray-2.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d0d0923087fe1f2d85daa68463d221e90b4b8ed0356480c887eea90b2a2cc7ee"}, + {file = "bitarray-2.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:70cebcf9bc345ac1e034fa781eac3619323eaf87f7bbe26f0e28850beb6f5634"}, + {file = "bitarray-2.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:890355bf6ba3dc04b5a23d1328eb1f6062165e6262197cebc9acfebdcb23144c"}, + {file = "bitarray-2.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f0b54b95e39036c116ffc057b3f56f6084ce88822de3d5d1f57fa38554ccf5c1"}, + {file = "bitarray-2.8.2-cp39-cp39-win32.whl", hash = "sha256:b499d93fa31a73e31ee62f2cbe07e4df833fd7151734b8f07c48ffe3e4547ec5"}, + {file = "bitarray-2.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:b007aaf5810c708c5a2778e371aa546d7084e4e9f82f65865b2ce5a182376f42"}, + {file = "bitarray-2.8.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1b734b074a09b1b2e1de7df423565412d9213faefa8ca422f32be756b189f729"}, + {file = "bitarray-2.8.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd074b06be9484040acb4c2c0462c4d19a43e377716be7ba10440f51a57bb98c"}, + {file = "bitarray-2.8.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678696bb613f0344b79be385747aae705b327a9a32ace45a353dd16497bc719"}, + {file = "bitarray-2.8.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb337ffa10824fa2025c4b1c06a2d809dbed4a4bf9e3ffb262676d084c4e0c50"}, + {file = "bitarray-2.8.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2b3c7aa2c9a6533dc7234d2a303efdcb9df3f4ac4d0919ec1caf568868f12a0a"}, + {file = "bitarray-2.8.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6765c47b487341837b3731cca3c8033b971ee082f6ab41cb430aa3447686eec"}, + {file = "bitarray-2.8.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8566b535bc4ebb26247d6f636a27bb0038bc93fa7e55121628f5cd6b0906ac"}, + {file = "bitarray-2.8.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56764825f64ab983d32b8c1d4ee483f415f2559e59388ba266a9fcafc44305bf"}, + {file = "bitarray-2.8.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f45f7d58c399e90ee3bddff4f3e2f53ff95c948b2d43de304266153ebd1d778"}, + {file = "bitarray-2.8.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:095851409e0db75b1416c8c3e24957135d5a2a206790578e43739e92a00c17c4"}, + {file = "bitarray-2.8.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8bb60d5a948f00901da1d7e4953189259b3c7ef79391fecd6f18db3f48a036fe"}, + {file = "bitarray-2.8.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2dc483ada55ef35990b67dc0e7a779f0b2ce79d156e452dc8b835b03c0dca9"}, + {file = "bitarray-2.8.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a35e308c23f039064600108fc1c8416bd102bc3cf3a6915761a9f7c801237e0"}, + {file = "bitarray-2.8.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa49f6cfcae4305d8cff028dc9c9a881189a38f7ca43c085aef894c58cb6fbde"}, + {file = "bitarray-2.8.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:111bf9913ebee4630e2cb43b61d0abb39813b231262b114e5268cd6a405a22b9"}, + 
{file = "bitarray-2.8.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b71d82e3f001bcb53463023f7f37e223fff56cf048f577c6d85597db94770f10"}, + {file = "bitarray-2.8.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:440c537fdf2eaee7fdd41fb1dce5701c490c1964fdb74225b10b49a7c45bc7b4"}, + {file = "bitarray-2.8.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c384c49ce52b82d5b0355000b8aeb7e3a7654997916c1e6fd9d29697edda1076"}, + {file = "bitarray-2.8.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27428d7b0e706307d0c697f81599e7af4f52e5873ea6bc269eae3604b16b81fe"}, + {file = "bitarray-2.8.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4963982d5da0825768f9a80760a8560c3e4cf711a9a7ea06ff9bcb7bd250b131"}, + {file = "bitarray-2.8.2.tar.gz", hash = "sha256:f90b2f44b5b23364d5fbade2c34652e15b1fcfe813c46f828e008f68a709160f"}, +] + +[[package]] +name = "bitstring" +version = "4.1.4" +description = "Simple construction, analysis and modification of binary data." +optional = false +python-versions = ">=3.7" +files = [ + {file = "bitstring-4.1.4-py3-none-any.whl", hash = "sha256:da46c4d6f8f3fb75a85566fdd33d5083ba8b8f268ed76f34eefe5a00da426192"}, + {file = "bitstring-4.1.4.tar.gz", hash = "sha256:94f3f1c45383ebe8fd4a359424ffeb75c2f290760ae8fcac421b44f89ac85213"}, +] + +[package.dependencies] +bitarray = ">=2.8.0,<3.0.0" + +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." +optional = true +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = 
"black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "boto3" +version = "1.34.114" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.114-py3-none-any.whl", hash = "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280"}, + {file = "boto3-1.34.114.tar.gz", hash = "sha256:eeb11bca9b19d12baf93436fb8a16b8b824f1f7e8b9bcc722607e862c46b1b08"}, +] + +[package.dependencies] +botocore = ">=1.34.114,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.121" +description = "Low-level, data-driven core of boto 3." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.121-py3-none-any.whl", hash = "sha256:25b05c7646a9f240cde1c8f839552a43f27e71e15c42600275dea93e219f7dd9"}, + {file = "botocore-1.34.121.tar.gz", hash = "sha256:1a8f94b917c47dfd84a0b531ab607dc53570efb0d073d8686600f2d2be985323"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +crt = ["awscrt (==0.20.11)"] + +[[package]] +name = "build" +version = "1.2.1" +description = "A simple, correct Python build frontend" +optional = true +python-versions = ">=3.8" +files = [ + {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, + {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = true +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = true +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = true +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = true +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "chia-base" +version = "0.1.7" +description = "Common types and simple utilities used through chia code base" +optional = false +python-versions = "*" +files = [ + {file = "chia_base-0.1.7.tar.gz", hash = "sha256:d7bc3ab3c550ea2d0defb488d1e480b12abcf2c8966de616e6dc2e49e33e8456"}, +] + +[package.dependencies] +chia_rs = ">=0.5.2" +clvm_rs = "0.2.5" + +[package.extras] +dev = ["black (>=23.1.0)", "pytest (>=7.2.1)", "ruff (>=0.0.252)"] + +[[package]] +name = "chia-rs" +version = "0.10.0" +description = "Code useful for implementing chia consensus." +optional = false +python-versions = "*" +files = [ + {file = "chia_rs-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f63a2ec5ba938a3c9aeae762d2c70bd0922ab89d36fea057619897935b08f3f9"}, + {file = "chia_rs-0.10.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5814605cb5fdae90fcd7034fe468b1897575c2a4e63b103dee732d465cd75dda"}, + {file = "chia_rs-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a669e666d2675b3e13f6a7ddd5aa0848a9e89cf770b8231680539636e6397d"}, + {file = "chia_rs-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94a242a688370dcd125d317f90c587015f9518e956df5b70f9dce2cb04357a78"}, + {file = "chia_rs-0.10.0-cp310-none-win_amd64.whl", hash = "sha256:a0d2cabc4e2c5c0eff4be539fb06620db71ddd461de070dbe40dac47c7a5724f"}, + {file = "chia_rs-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ee7306fd6a427e8601988475972218c826ec5cf9420641c2d6dda2c7607397b"}, + {file = "chia_rs-0.10.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:01052a68b56cde2061554f3cdf4f02fff1be356641ae27664e4e874c554784b3"}, + {file = "chia_rs-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b5b2fb0680aa5e5cd73c323213b1a16bf1a7a32ee1bc82ea6c5d104113d7d31"}, + {file = "chia_rs-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fa8a95dae2edba73f853203b43a7ed0f0eae1faa9fd50cd0cb30c8000ae344"}, + {file = "chia_rs-0.10.0-cp311-none-win_amd64.whl", hash = "sha256:950fcd774eec9ec34ab3dfb4029e85c46c9734cb9ec893ffc977f930ee0ac0c8"}, + {file = "chia_rs-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d8d2bd5cbd4a5774d9d9fa517aef4008cad090a13bec6a2f1a796dd4754ec769"}, + {file = "chia_rs-0.10.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:c33b98fa7625e3a04d3d6ec8047a77e3e1ea2a79bfa7c7ed013ee09f1f7abbda"}, + {file = "chia_rs-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3450dde0adc4f82260c1bf42ca3456cb3582cb035dfac3a67f3ff0b65f7353b2"}, + {file = "chia_rs-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2e3cb0cf5922517b9da639086b21939727f610e943020b2cccb81b2571cba8c6"}, + {file = "chia_rs-0.10.0-cp312-none-win_amd64.whl", hash = "sha256:e5aad728f40058e6dacecb574854506636cd1bfe035bafc2ee10525c9a4c4827"}, + {file = "chia_rs-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f5cad59649de4217cbca10e7f20299e0af3c3cedfefb0feb8a30cd01197be95c"}, + {file = "chia_rs-0.10.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:321120a0faa8421dddbed66dee856836de7fab6aa50ec3cd37d78baa14f9876f"}, + {file = "chia_rs-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c927bb3302f14db4ed851db2f06825c320616815b726d7c21f3383621b413f1"}, + {file = "chia_rs-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57f152b0c699ad1ff263b13f18c91fd2737cb4fe765c1ef45544e3c313701cd0"}, + {file = "chia_rs-0.10.0-cp38-none-win_amd64.whl", hash = "sha256:c69a32db20ee5f06ca4cf4a1dca681b445ed1b16e1ad83f1bccb8f4bef4af0db"}, + {file = "chia_rs-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:72ad59dfc3a5d610715fdca0a1df3015c6e72bd6d984d696cde92256c3a8851f"}, + {file = "chia_rs-0.10.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4cb0d926ed0d3ae25f66d9ece637add60318810afa435de3ebb49c3fe0f76b7c"}, + {file = "chia_rs-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1d3b7f6b0de8e888c55bd2e538f8b1740a4a1610d495dbedf06778543708f9d"}, + {file = "chia_rs-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba284ea87165d3a654c5a638f0c119eb9a95f44cb98d8e0a6f641e0717ee02f8"}, + {file = "chia_rs-0.10.0-cp39-none-win_amd64.whl", hash = "sha256:5ca68b2e4b90d9afa1770a06d27a8ea5c0d0569d9e62a875cefe3bb060e32e9a"}, + {file = "chia_rs-0.10.0.tar.gz", hash = "sha256:fca56c9a292bbfe7ffbe892c8a831ec30bfc4748f42cfb11f828a687fc7c0e2f"}, +] + +[[package]] +name = "chiabip158" +version = "1.5.1" +description = "Chia BIP158 (wraps C++)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chiabip158-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c889ad321d4aca4e0a9820665d264d1822b8d2e2402eaf5a3d36a5fdb5502b76"}, + {file = "chiabip158-1.5.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:cda3bdf571919b510a739d0bfa4a936da6f2df0a87946c8f78b4d87e0a6c6274"}, + {file = "chiabip158-1.5.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54c31058d6425b707373c1d1ee377c5963bf0f027b36d1cb9efd93c251444bf8"}, + {file = "chiabip158-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3111434ebc61ec182a04841adc23bc19a093457f4f4893df388d9e62d7890a88"}, + {file = "chiabip158-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:7530bf8253a3774415e400c927f9b292a63038160e00afd46ed0a3d49a677214"}, + {file = "chiabip158-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61bf7d6092ae5db09ff962cddf93ab851a74218c07768ffd3b981bd05766bed5"}, + {file = "chiabip158-1.5.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:9baffcfbcb26d630b35f6a0b4b64b3acd4b70ca8badf9728c4d5544a1e727d4e"}, + {file = "chiabip158-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d468a38ea70016db649b95bd647d2c7225a6ea7d8187dd4c33b5298cc4f4002"}, + {file = "chiabip158-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80249a8e8e7a1a9239fa3d5166aa0106d74bcd1ccd5d6f63018d2ea1e5e81c5f"}, + {file = "chiabip158-1.5.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:5aebca9e77e41c16e0e764534c50fbf262ec6c868834a37597f82b5813a48180"}, + {file = "chiabip158-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a964b036cef3d8dbb75368109aa00c0c83659cda02bf97b4de34f20ab8ccc58a"}, + {file = "chiabip158-1.5.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:9039051893b5f526d916da771a8a920d082c2a8ac07f0020d59754c0bf3fd60a"}, + {file = "chiabip158-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc169685d5d5896a58cf795a3ecf8eb35ca388db823878233b45aef3f1498971"}, + {file = "chiabip158-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e54ee97844c327ddcfc4a1053671238b79d0d9a23524858a6b4b85ac26e2ac0"}, + {file = "chiabip158-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:09405564e2a42b08a698d3865669a6a1aff66f3d55947304a2b7e7702e97d81a"}, + {file = "chiabip158-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f6b25950aa04168110729a5949480b3031cc9670b0cc1ac13e7b43d3317e78e"}, + {file = "chiabip158-1.5.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a7794c92db245ea3ccdc7579c2ce1191d9a5b24512d9404c724b03009e035403"}, + {file = "chiabip158-1.5.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bec5fcdc258944e6bc73efb478ae873b28f11bb54a3137ddc4f220c3a5733714"}, + {file = "chiabip158-1.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38624f9a99193c44c0c1b88a2ab8b78221092c72a294904a1a596c3cf412f82d"}, + {file = "chiabip158-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:2470e45be8e48d902b3adf1be048e2440f13a0d4a2515848bf30e4087d8de51a"}, + {file = "chiabip158-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2d5308a6670df533e161d0b7c6e2f03e58b2d8670c3661d144338f8d2153f5b"}, + {file = "chiabip158-1.5.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:59a422e8888124250e9a1dbf1ec43ea29b640ed0379053724cd5f027b42dbdfa"}, + {file = "chiabip158-1.5.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78086ceb66dcd5001bf1700de4e3b484a97862b398940238e6b3d56c668cbbe4"}, + {file = "chiabip158-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f41a044cbfebac2f92377902293a5e45e9445d811f721eb176f5790e6a6fe97"}, + {file = "chiabip158-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:bfeece41abea633aab2b768ca704f981472dd2878e9c89cbe46a84c3fb4459c9"}, + {file = "chiabip158-1.5.1.tar.gz", hash = "sha256:c883fe663bc7ea6951ad91be40e0ebcddadfe5049f82e531db989b8710bcd8c9"}, +] + +[[package]] +name = "chialisp-loader" +version = "0.1.2" +description = "Provides `load_puzzle` which dynamic rebuilds if `chialisp_builder` is available." 
+optional = false +python-versions = "*" +files = [ + {file = "chialisp_loader-0.1.2.tar.gz", hash = "sha256:085080fc7d4a3fab90fe93db3c599391b869047846608da1ae92b54b4100e8a6"}, +] + +[package.dependencies] +clvm_rs = "*" +importlib_resources = "*" + +[[package]] +name = "chialisp-puzzles" +version = "0.1.1" +description = "Some canonical puzzles deployed on chia-blockchain" +optional = false +python-versions = "*" +files = [ + {file = "chialisp_puzzles-0.1.1.tar.gz", hash = "sha256:d474fa44d070da6a44e236bbabb6baefb38ff6b24049399924d2d9e638fa66a8"}, +] + +[package.dependencies] +chialisp_loader = ">=0.1.2" + +[[package]] +name = "chiapos" +version = "2.0.4" +description = "Chia proof of space plotting, proving, and verifying (wraps C++)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chiapos-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa7d13ad72c97d6081ab494f41ea5dbb1e8e7898ba644b45f4512443a4368f45"}, + {file = "chiapos-2.0.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:a9e2cf709690efafa16fa1f66c5b5b8a0e00ed0fa4c66ce6939828035f4bb9f9"}, + {file = "chiapos-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a6822b5aa7c5855cd5ff59c6644a441a01c2ce09c38a47a292f8bebb0a042c"}, + {file = "chiapos-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9394d1c02c56829951379d6a2ef155f33760a3722e926f03e5e1e771e7161cd6"}, + {file = "chiapos-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:579c87cd8e028f008fe75fb37ad0aea8a80a81774ac269c4bbb6195a5a4f7937"}, + {file = "chiapos-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8d691cf50beae2f8f7e751a9a376701997b72c8e0c1d8fa56d2c441f4290c30a"}, + {file = "chiapos-2.0.4-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:c8c6a0d4a302edb8cecddc253db876ca1727149ddc6911dd255ee058d39c1d78"}, + {file = "chiapos-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2729ca64281b31d05bc1160860a8ed654f2a34a92775ef13292f25e5b64d34ad"}, + {file = "chiapos-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1a2f4fe886b71af206966b5c8001df5703d0d3b5a47bf7c207325b25ab784d"}, + {file = "chiapos-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:2c11c6891217ab49e20ef4b9ae1e2e6aaed752fd41e360515b65ee36c0ec29f8"}, + {file = "chiapos-2.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc946f0bb74313c85a5fd769ba2d36a9a3684a111cde38293c8ed7b54f9a4d12"}, + {file = "chiapos-2.0.4-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:0c9f315b12ddb9b7a726b19711c841b7f199c568963dd6473495074fc23b8f63"}, + {file = "chiapos-2.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe238271df585a8277b46775124f4cef1dc28a2a475e9950ec292eadacac1c3f"}, + {file = "chiapos-2.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e120bbca94b511c7fb1c011efbfdd12d2a569a084fed26e53051650fdf28bf2"}, + {file = "chiapos-2.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:f662b5b58edc1aebee14e7e0085e9e19793efdf2dfb09956e0fb082bffcd48bd"}, + {file = "chiapos-2.0.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f1e2f828b340160813c573bd0c7b11011332203e1c36fa791a685db9c54f2c5b"}, + {file = "chiapos-2.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a259f2557d38b4b775ea1943e553b47db599bbd2ba6784f1c26e9ac1cdead8"}, + {file = "chiapos-2.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a286924cbba2770551bea1b315741d9980a67f242fe99629d84708a4e3b926df"}, + {file = "chiapos-2.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:f75920e0a6355b4882834b74d6a325f7bc9b4f20ca776ac79ecbc8dab91dd640"}, + {file = "chiapos-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:86ec4db0980ed9a9e1d72a6a0874c2b51ba58af42f1765fa8b32e674ad46af75"}, + {file = "chiapos-2.0.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:82f7c5b2ef0f514321b7edd2ef864e8927ba8b7c15c27a6621ea6fb7a7ad2d19"}, + {file = "chiapos-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae108016b0cad03cd3fa25caf57924781e6e62c99151be581e976966bf48f7d"}, + {file = "chiapos-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4060e71c468c654778458a47a05a68b6599aeb2f66e2794a820e11b89c6e3c33"}, + {file = "chiapos-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:1dbc6d7e3ce961ecd99c48e9df8d1448ea8623b228927ff91517b903fcf1486f"}, + {file = "chiapos-2.0.4.tar.gz", hash = "sha256:6498d75a7ac01bb64055055b2937f7f964270582a2f82be9a332aa828fc3e3ce"}, +] + +[[package]] +name = "chiavdf" +version = "1.1.4" +description = "Chia vdf verification (wraps C++)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "chiavdf-1.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0256d93be3e4921596576aff22254ab1810c5092c78c5d87f39a229df3fa6ca"}, + {file = "chiavdf-1.1.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:11a36b6f9f7752b0beb6e525cb1ea483cc78c513b0e7066dedfe6feb5eafd44a"}, + {file = "chiavdf-1.1.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:986673742982baa3afb7ff0e9580e23b7b7c60e367eac8c63ffbceb07bf702b1"}, + {file = "chiavdf-1.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787e7fc55f4b54b1d1b9779b1abecf58142c30c9a19607e4277e4bd4ada5fb4b"}, + {file = "chiavdf-1.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:7732c8731081c132ef14acab134646c91b956267310af822a0ecd03aa884647b"}, + {file = "chiavdf-1.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5301ca8f5f02c7d77732a5b04d0620fef3667246494b8cb7ec95155069855d58"}, + {file = "chiavdf-1.1.4-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5b98ce04bd15295c79b88c4ba53d21fe76b2412b59e57e7c1e7352755978a084"}, + {file = "chiavdf-1.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d8f0d5426f6cd1ec90d8634a97e18a468ac9c12674c64d48cdb3872f38e8b1"}, + {file = "chiavdf-1.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8231d3b8eaa384fda651efc20089c5ada1227c19e36212b5ad98cb7dc7c57cb"}, + {file = "chiavdf-1.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:b1fc151af42e09fd2b613fe639375b8baa21dde792a3899aa1f5aa22bf2ab898"}, + {file = "chiavdf-1.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:debe6cca2f6f7963e815ff00a9c9b12a0159b89e1d1962269c3da7ad342af45d"}, + {file = "chiavdf-1.1.4-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:d1575d0b4eb9065d82566c4cae9aaa153fa0ebba7fd21fc7b5407fa3a8ec0819"}, + {file = "chiavdf-1.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2bef1c3173b3ec0c6b34cbc1bbc32dfbb2801e048acccf732b9fb2bb6c0ac70"}, + {file = "chiavdf-1.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55fc997e41e95cd24e54d308f23312d73c2759bddbcf338bd74a359359db6f6f"}, + {file = "chiavdf-1.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:ac8c5ffc4a90992c05ded68a882776bdc2c916794f687f142b755aa7e7bf59cc"}, + {file = 
"chiavdf-1.1.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b5cf9322da348b48d2dd0546a384a5574ffd37fd10a8c3c6d19ccfc279237fa"}, + {file = "chiavdf-1.1.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:99245e171be8ca34600d7f3286928bb11b53f4d29f7c0211d1767675f514791f"}, + {file = "chiavdf-1.1.4-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5355c71598d6764a5225a13b45ea73bdfaf586e3765ba91e0b348a2456729bc"}, + {file = "chiavdf-1.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d48bdc419d544e068a71ab76f0d56ca6201fd4dd7269f137b8bb4521d494f12e"}, + {file = "chiavdf-1.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:24269407dfb37a674f016babdb410b61c05e0d04383487acdaa78cab8df15d91"}, + {file = "chiavdf-1.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9dcdf1fdfc6d0c223f662098595cd697a674de224ce9d9fa00fc574f68f2868"}, + {file = "chiavdf-1.1.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:90b3e21e30227c71ef012c55b0625cd19852fcfa3a080054779039d0478c1f8a"}, + {file = "chiavdf-1.1.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e62e809294dbed49f4ac61ffda5d4e86a6b585d6cb29710ba6b630f90702de52"}, + {file = "chiavdf-1.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:892d82aae2871ed9e57dd5efa42f53a91c6c12c9d46e45e1d224ae1a9a4e6a78"}, + {file = "chiavdf-1.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:e68c6a5610b5a451ce1d1ea058f34320b35419b53b11da4a9affd8ab4c86e325"}, + {file = "chiavdf-1.1.4.tar.gz", hash = "sha256:17588eff94437a99640e568b7ef4ebcc76596b925f332b47f74f29b50622c98c"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "clvm" +version = "0.9.10" +description = "[Contract Language | Chialisp] Virtual Machine" +optional = false +python-versions = "<4,>=3.8.1" +files = [ + {file = "clvm-0.9.10-py3-none-any.whl", hash = "sha256:d74a777b4ed8bb36b8a65a31c102470b6a5c6673abc98fa4f6dead476329d367"}, +] + +[package.dependencies] +chia-rs = ">=0.2.13" +importlib-metadata = ">=6.11.0,<6.12.0" +typing-extensions = ">=4.0,<5.0" + +[package.extras] +dev = ["clvm-tools (>=0.4.4)", "mypy", "pytest", "setuptools", "types-setuptools"] + +[[package]] +name = "clvm-rs" +version = "0.2.5" +description = "Implementation of `clvm` for Chia Network's cryptocurrency" +optional = false +python-versions = "*" +files = [ + {file = "clvm_rs-0.2.5-cp37-abi3-macosx_10_14_x86_64.whl", hash = "sha256:980933a048779a9e587194385bd86b806fa05931b92ec75350bb433344a23ba8"}, + {file = "clvm_rs-0.2.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:38cffe8a97acb6aebef758be8bce18a007e79328c76e732b2cf9e1a4f22c02c7"}, + {file = "clvm_rs-0.2.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7c48294f4211159fbfc40a62655713f03867104f9892d875f3dae2515bdd6b8"}, + {file = "clvm_rs-0.2.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:403b8c82ee5c0137792fd975649b1d1fc92170669d787e501978242a72ac0361"}, + {file = "clvm_rs-0.2.5-cp37-abi3-win_amd64.whl", hash = 
"sha256:77a2f5f626dc0cd5196c97fb689ceac9f853f85b7d622d630840aa597a97e4f0"}, + {file = "clvm_rs-0.2.5.tar.gz", hash = "sha256:af26493752dc2e5e7ebdb7bdfa10160c5d9e1b39df3f238cd1641ef98e8c446f"}, +] + +[[package]] +name = "clvm-tools" +version = "0.4.9" +description = "CLVM compiler." +optional = false +python-versions = "*" +files = [ + {file = "clvm_tools-0.4.9-py3-none-any.whl", hash = "sha256:a38419106da781bc0130f5911bc57748be6ddddba3d7809bb58ab930e84adb7d"}, +] + +[package.dependencies] +clvm = ">=0.9.2" +clvm-tools-rs = ">=0.1.37" +importlib-metadata = "*" +setuptools = "*" + +[package.extras] +dev = ["pytest"] + +[[package]] +name = "clvm-tools-rs" +version = "0.1.40" +description = "tools for working with chialisp language; compiler, repl, python and wasm bindings" +optional = false +python-versions = "*" +files = [ + {file = "clvm_tools_rs-0.1.40-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:742ae2667163ba365df3413a0c0394793df708043e482f1a7be775ecb6a2475d"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:1eca4f1bf8005f37666cc87c0dcd945d0d3d65ff51918f7ace86d428aab74cc6"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-macosx_11_0_x86_64.whl", hash = "sha256:7a3bdf661e4ad77f04dceb96d0f097f8a3192b7a28dc0a5a56dfa18e66617d91"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e3b88443e823318cbcef37161152159307e385923118c48851fcbd91e13101"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b4edd053f895366f0dedae6aa22c26db7fd78bad161baf4f81b76a7b824af4"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:4eba19e55054c635ac99f0ce37d6dff16f4400af065965ccee315dfebffa260f"}, + {file = "clvm_tools_rs-0.1.40-cp38-abi3-win_amd64.whl", hash = "sha256:b7a9ee6000bd437a6898241574f37e225027c4fe1781ff9286cb71fd30d07a4b"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "concurrent-log-handler" +version = "0.9.25" +description = "RotatingFileHandler replacement with concurrency, gzip and Windows support" +optional = false +python-versions = ">=3.6" +files = [ + {file = "concurrent_log_handler-0.9.25-py3-none-any.whl", hash = "sha256:157bee12914aa2a72246d1d0641ce07c1aa7a55faa3322bed02f21e60395eb82"}, + {file = "concurrent_log_handler-0.9.25.tar.gz", hash = "sha256:1e2c6f021414e214d3dac66107894827a3e78db63018304a4f29e55ba549ac22"}, +] + +[package.dependencies] +portalocker = ">=1.6.0" + +[[package]] +name = "coverage" +version = "7.5.3" +description = "Code coverage measurement for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = 
"coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = 
"coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "diff-cover" +version = "9.0.0" +description = "Run coverage and linting reports on diffs" +optional = true +python-versions = "<4.0.0,>=3.8.10" +files = [ + {file = "diff_cover-9.0.0-py3-none-any.whl", hash = "sha256:31b308259b79e2cab5f30aff499a3ea3ba9475f0d495d82ba9b6caa7487bca03"}, + {file = "diff_cover-9.0.0.tar.gz", hash = "sha256:1dc851d3f3f320c048d03618e4c0d9861fa4a1506b425d2d09a564b20c95674a"}, +] + +[package.dependencies] +chardet = ">=3.0.0" +Jinja2 = ">=2.7.1" +pluggy = ">=0.13.1,<2" +Pygments = ">=2.9.0,<3.0.0" + +[package.extras] +toml = ["tomli (>=1.2.1)"] + +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = true +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = true +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "dnslib" +version = "0.9.24" +description = "Simple library to encode/decode DNS wire-format packets" 
+optional = false +python-versions = "*" +files = [ + {file = "dnslib-0.9.24-py2-none-any.whl", hash = "sha256:4f26c55603ce9f961b84404f19ff03b3ca4a051eafb2b1e141ef9b96485467c6"}, + {file = "dnslib-0.9.24-py3-none-any.whl", hash = "sha256:39327e695f871574198b76ef506d9691d762b5344e0d66f5f78fefe1df99e7fd"}, + {file = "dnslib-0.9.24.tar.gz", hash = "sha256:ef167868a30d4ce7c90b921279d7ecfb986be8ebc530f3e6050a2ecb68707c76"}, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = true +python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "filelock" +version = "3.14.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = true +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", 
hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "hsms" +version = "0.3.1" +description = "Hardware security module simulator for chia bls12_381 signatures" +optional = false +python-versions = "*" +files = [ + {file = "hsms-0.3.1-py3-none-any.whl", hash = "sha256:d05a5598751f856b126d2fa6606f70c975304e7ab707dd7092b1ac2eed6267ec"}, + {file = "hsms-0.3.1.tar.gz", hash = "sha256:22ee883399cba0e153a5194b11e04c93348cafef899ad5b34905a8ea60a167e0"}, +] + +[package.dependencies] +chia-base = ">=0.1.5" +chialisp-puzzles = ">=0.1.1" +segno = "1.4.1" + +[package.extras] +dev = ["black (>=22.6)", "flake8 
(>=4.0.1)", "pytest"] + +[[package]] +name = "identify" +version = "2.5.30" +description = "File identification library for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, + {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = true +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = true +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jaraco-classes" +version = "3.3.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"}, + {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "jaraco-context" +version = "5.3.0" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, + {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, +] + +[package.dependencies] +"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-functools" +version = "4.0.1" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.functools-4.0.1-py3-none-any.whl", hash = "sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664"}, + {file = "jaraco_functools-4.0.1.tar.gz", hash = "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jeepney" +version = "0.8.0" +description = "Low-level, pure Python DBus protocol wrapper." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] + +[package.extras] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "keyring" +version = "25.1.0" +description = "Store and access your passwords safely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "keyring-25.1.0-py3-none-any.whl", hash = "sha256:26fc12e6a329d61d24aa47b22a7c5c3f35753df7d8f2860973cf94f4e1fb3427"}, + {file = "keyring-25.1.0.tar.gz", hash = "sha256:7230ea690525133f6ad536a9b5def74a4bd52642abe594761028fc044d7c7893"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +"jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +completion = ["shtab (>=1.1.0)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "keyrings-cryptfile" +version = "1.3.9" +description = "Encrypted file keyring backend" +optional = true +python-versions = ">=3.5" +files = [ + {file = "keyrings.cryptfile-1.3.9.tar.gz", hash = "sha256:7c2a453cab9985426b8c21f7ad54a57e49ff8e819ba18e08340bd8801acf0091"}, +] + +[package.dependencies] +argon2_cffi = "*" +"jaraco.classes" = "*" +keyring = ">=20.0.0" +pycryptodome = "*" + +[[package]] +name = "lxml" +version = "5.2.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = true +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = 
"lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = 
"sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = 
"lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = 
"lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "macholib" +version = "1.16.3" +description = "Mach-O header analysis and editing" +optional = true +python-versions = "*" +files = [ + {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"}, + {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"}, +] + +[package.dependencies] +altgraph = ">=0.17" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = true +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = true +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = true +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "memory-profiler" +version = "0.61.0" +description = "A module for monitoring memory usage of a python program" +optional = true +python-versions = ">=3.5" +files = [ + {file = "memory_profiler-0.61.0-py3-none-any.whl", hash = "sha256:400348e61031e3942ad4d4109d18753b2fb08c2f6fb8290671c5513a34182d84"}, + {file = "memory_profiler-0.61.0.tar.gz", hash = 
"sha256:4e5b73d7864a1d1292fb76a03e82a3e78ef934d06828a698d9dada76da2067b0"}, +] + +[package.dependencies] +psutil = "*" + +[[package]] +name = "miniupnpc" +version = "2.2.2" +description = "miniUPnP client" +optional = true +python-versions = "*" +files = [ + {file = "miniupnpc-2.2.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:0128af803e268aabe6c756946556aca8588e108c1c5112f8023114337c476ba9"}, + {file = "miniupnpc-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb29594482b0b8dc01048f07aa339d87f4c4116fafe8d09cc5709845113abfdf"}, + {file = "miniupnpc-2.2.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5df96d46fff84c5c077e175f55d22aae62b85e533f7f58877bbe497594098fc7"}, + {file = "miniupnpc-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02a2511a2439a82007e486418103a506de58644d93fbaef96ee1bf935a112aa1"}, + {file = "miniupnpc-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6933a2aa0db6ced5355bebf75a1f7e14c420fa305dd4f1d80529a8080c49af2f"}, + {file = "miniupnpc-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:f528a3323b9ce42d8a662464a953c1e1c12665dbf4a40f2ac9c457be31788ace"}, + {file = "miniupnpc-2.2.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:01b9bf6a0078347b53ac70b297307b962b6b4f4c9dc5f6f1bbcbc1594faa61bc"}, + {file = "miniupnpc-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:be37e935f9adaa954afb980de50f8cbd8d0a2e8056380085e0aa794a32811bc7"}, + {file = "miniupnpc-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1920851bb316c1454ca603e919ff29ecd981563016ce1fb4ee9027a57d682057"}, + {file = "miniupnpc-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c03549903fb9023c84b5a69d85ba48c992ec67d92cade2c77660c754a7fe0851"}, + {file = "miniupnpc-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:815b2747cae3cfc27545b737472dbe707ac2dde02d0fc3a60f994810004413c8"}, + {file = "miniupnpc-2.2.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0e965e2920b959946e722930c34299b797c321bc518fb168901b3dd6585a7dea"}, + {file = "miniupnpc-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ef06d2b966805e787afb3ddae3375cef669ff42cd358f6426241a259f539615d"}, + {file = "miniupnpc-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bdec2a5f6544af1b1cc6619f08e5dd2bfc92e489b424377ec4a37f109483950"}, + {file = "miniupnpc-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf689d111d9a4ccc0e02b7b678f326a3a2bfe650cbf50a0bd9199fec88a16c2d"}, + {file = "miniupnpc-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5bec8bf1c7bacd522fd7965c93575915f43f719f37d3d552ea3c57e874bbdc20"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ea40595375999cb0fd4f958ad4dcbf9a7ae358c83279d0348e687f413227d815"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:081d048b81b0279e8f05fb84bcd43edcf3812a3e3301286dd7c9adff915aa87c"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0f45faa06df724b8048bc76e449e12baaeda1cad4b25992549d90606b62b9d21"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:df03877034fdcfdfcd54d3d08f647cdb6f4bf12f2efea7cf295e216dda12ca54"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8cade5e8ce7ba9e017b0207642083ac841713663712d02a8db7f3bc0e563a1db"}, + {file = 
"miniupnpc-2.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5950188d932b1d12786964c5a8ec860e7f62edddac987766bf7dd1a303a7b6b9"}, + {file = "miniupnpc-2.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:de0ad59f8f04f4ae327fb5bbec0f0f27a0b0e488fc4dc015359bd65decab21b4"}, + {file = "miniupnpc-2.2.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7c31bb37f61de3e86185f1899ec5f3d37b8d12d3efc79ce6b39a458982cbaba0"}, + {file = "miniupnpc-2.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d66a77d0c5090b985b9d1b956fb1838754e3cd990c6b668c710b35eb37a6dc6d"}, + {file = "miniupnpc-2.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ddcbe2d77052acdee27440a0bbe7cf4b36cb3c60ba3196e577a9fea42a99ec9e"}, + {file = "miniupnpc-2.2.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:31c78f6205a25d5a9b2b8a09192214f74f5374ddc426a2e7db479e6e09270c28"}, + {file = "miniupnpc-2.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9f32e0e4f17222e446492594ea53178273d556a07bd5b2427eae73d005de92ff"}, + {file = "miniupnpc-2.2.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa5f81e98c3ffb11e9886795e05d10a980cd167591106dcd62cec0883be46def"}, + {file = "miniupnpc-2.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52ce679c7da3c5d9f53ffef6e48e775aee84d837181b07a7072cd67b6ebd3e8"}, + {file = "miniupnpc-2.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:5a49a3acd557c76f353b2dbbe68bdd01f0ef048b8c055189b2fe5f575eb8df4d"}, + {file = "miniupnpc-2.2.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:ea3e1fec76ebd87468445221d46d52f7403acff1997a88561e07e5e8727a585a"}, + {file = "miniupnpc-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:406a3fd262998b0769b72e090a9fceadd5587d01854c2c7d774258f5d01a19b3"}, + {file = "miniupnpc-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1cea18b70d9b79c42eb1260f2542883f87da9743594bf87a6f4d3efc60dada1b"}, + {file = "miniupnpc-2.2.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:4f50c40ef9f8efd3f29dc7ff8436fe829701ce7e21e0b55c716fc52c504e376d"}, + {file = "miniupnpc-2.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ee059a9ebcbe9da129002b9501c16c98ed8ba641aa882ddc43e3ce41458357c3"}, + {file = "miniupnpc-2.2.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:07d1dcb7467abcceb4fa613b08c385630507f02981db2fa4ab1f813f0ad267fe"}, + {file = "miniupnpc-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ce0c86b359661c180a56a71b4c82e1531f0aca2160658b65b5cd6740d39cc2"}, + {file = "miniupnpc-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:14229166c2998b4bffcda059fc04d344d53ab1b51a16717f55fa22944c3f13ac"}, + {file = "miniupnpc-2.2.2.tar.gz", hash = "sha256:d62f2fa0f7da884308dd1cca988e42ae56b72a2eb995d4aa9837f648f519dc51"}, +] + +[package.source] +type = "legacy" +url = "https://pypi.chia.net/simple" +reference = "chia" + +[[package]] +name = "more-itertools" +version = "10.1.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, + {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy" +version = "1.10.0" 
+description = "Optional static typing for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = true +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nh3" +version = "0.2.14" +description = "Ammonia HTML sanitizer Python binding" +optional = true +python-versions = "*" +files = [ + {file = "nh3-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a"}, + {file = "nh3-0.2.14-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6"}, + {file = "nh3-0.2.14-cp37-abi3-win32.whl", hash = "sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873"}, + {file = 
"nh3-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e"}, + {file = "nh3-0.2.14.tar.gz", hash = "sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = true +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = true +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pefile" +version = "2023.2.7" +description = "Python PE parsing module" +optional = true +python-versions = ">=3.6.0" +files = [ + {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"}, + {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"}, +] + +[[package]] +name = "pip" +version = "24.0" +description = "The PyPA recommended tool for installing Python packages." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pip-24.0-py3-none-any.whl", hash = "sha256:ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc"}, + {file = "pip-24.0.tar.gz", hash = "sha256:ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2"}, +] + +[[package]] +name = "pkginfo" +version = "1.9.6" +description = "Query metadata from sdists / bdists / installed packages." +optional = true +python-versions = ">=3.6" +files = [ + {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, + {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, +] + +[package.extras] +testing = ["pytest", "pytest-cov"] + +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "portalocker" +version = "2.8.2" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +files = [ + {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + +[[package]] +name = "pre-commit" +version = "3.5.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = true +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = true +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.4-cp37-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:56d557d85b8464e9b8ace0995705a146225b94bdad8fa52f266beae0c3211ca3"}, + {file = "psutil-5.9.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a21eb32d5ef5782a29b02b06912336a211d9665d6f6de39b0928c7f0481d1e16"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_10_14_x86_64.whl", hash = "sha256:6a8d6bfca2cdb7d52299446a13985ceaa889be39d5701410c21be567e32c3ede"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[package.source] +type = "legacy" +url = "https://pypi.chia.net/simple" +reference = "chia" + +[[package]] +name = "py3createtorrent" +version = "1.1.0" +description = "Create torrents via command line!" 
+optional = true +python-versions = ">=3.5, <4" +files = [ + {file = "py3createtorrent-1.1.0-py3-none-any.whl", hash = "sha256:2d2e16b49e1071f057f685aa56fab7948f7907dceed1e86731473425bf69f42d"}, + {file = "py3createtorrent-1.1.0.tar.gz", hash = "sha256:afd8b39e04d698832819877be2b45727993cfbfe77bd22a35aa032044b1dafa2"}, +] + +[package.dependencies] +"bencode.py" = "*" + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycryptodome" +version = "3.20.0" +description = "Cryptographic library for Python" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, +] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyinstaller" +version = "6.7.0" +description = "PyInstaller bundles a Python application and all its dependencies into a single package." +optional = true +python-versions = "<3.13,>=3.8" +files = [ + {file = "pyinstaller-6.7.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:6decedba07031d1318528cb76d8400ae1572f7b08197f771ceca9e454e0060bf"}, + {file = "pyinstaller-6.7.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0756b3d4d3283ae2a5bda56abe479b80801ecafecdb3a96cd928542c2c75d016"}, + {file = "pyinstaller-6.7.0-py3-none-manylinux2014_i686.whl", hash = "sha256:df1b66500a7def997790bdadc23c142a2f96585ccd440beac63b72a4f3e41684"}, + {file = "pyinstaller-6.7.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:fa552214a8cbb5bfe4621c46a73c3cce12f299a520aa5ac397dc18718278f03a"}, + {file = "pyinstaller-6.7.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:5263ecbfd34a2297f0e5d41ecfcf7a6fb1ebbf60dbe0dc7c2d64f4a55871a99d"}, + {file = "pyinstaller-6.7.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4ff8ce04f1e5ab3a65d4a1ee6036cba648d0cdae6a7a33c6f0ca4ace46cdd43c"}, + {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:95efc2de7722213f376c5bac9620f390899f9a3c9eed70bd65adf29e2a085d5f"}, + {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1b6dd6a50a7315214d345875cd08f8aa71025e7ba6bfa0f95c09285585e8d372"}, + {file = "pyinstaller-6.7.0-py3-none-win32.whl", hash = "sha256:73b94ce02b208c34eaabd032dd1522a3c03c0b3118a31bf7e4eafe7a9f4af2da"}, + {file = "pyinstaller-6.7.0-py3-none-win_amd64.whl", hash = "sha256:a3f85935b40f89e717f1e67377d3bfc953060e5795828ecf5357e2c1f7aa52bf"}, + {file = "pyinstaller-6.7.0-py3-none-win_arm64.whl", hash = "sha256:53038419ca09eea59de02dfb52453dd327983b0957821be610fb04cfd84676d0"}, + {file = "pyinstaller-6.7.0.tar.gz", hash = "sha256:8f09179c5f3d1b4b8453ac61adfe394dd416f9fc33abd7553f77d4897bc3a582"}, +] + +[package.dependencies] +altgraph = "*" +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} +macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} +packaging = ">=22.0" +pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""} +pyinstaller-hooks-contrib = ">=2024.6" +pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""} +setuptools = ">=42.0.0" + +[package.extras] +completion = ["argcomplete"] +hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] + +[[package]] +name = "pyinstaller-hooks-contrib" +version = "2024.6" +description = "Community maintained hooks for PyInstaller" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pyinstaller_hooks_contrib-2024.6-py2.py3-none-any.whl", hash = "sha256:6cc88dad75261d9e1a7e0c6385139f35dcdbb16640c911a27f6078fe924a38cf"}, + {file = "pyinstaller_hooks_contrib-2024.6.tar.gz", hash = "sha256:3c188b3a79f5cd46d96520df3934642556a1b6ce8988ec5bbce820ada424bc2b"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} +packaging = ">=22.0" +setuptools = ">=42.0.0" + +[[package]] +name = "pylint" +version = "3.2.2" +description = 
"python code static checker" +optional = true +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.2-py3-none-any.whl", hash = "sha256:3f8788ab20bb8383e06dd2233e50f8e08949cfd9574804564803441a4946eab4"}, + {file = "pylint-3.2.2.tar.gz", hash = "sha256:d068ca1dfd735fb92a07d33cb8f288adc0f6bc1287a139ca2425366f7cbe38f8"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pyproject-hooks" +version = "1.0.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." +optional = true +python-versions = ">=3.7" +files = [ + {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"}, + {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"}, +] + +[package.dependencies] +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = true +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-monitor" +version = "1.6.6" +description = "Pytest plugin for analyzing resource usage." +optional = true +python-versions = ">=3.5" +files = [ + {file = "pytest-monitor-1.6.6.tar.gz", hash = "sha256:b0c44dc44a2d6cdd19f84caa18fafeb1227e2b33bcbd11a2071dacd3763e1b6f"}, + {file = "pytest_monitor-1.6.6-py3-none-any.whl", hash = "sha256:5be37d14aa423fe97af94bd44e3a47a551bd5d94d64921974580bbaadc1c1c94"}, +] + +[package.dependencies] +memory-profiler = ">=0.58" +psutil = ">=5.1.0" +pytest = "*" +requests = "*" +wheel = "*" + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyupgrade" +version = "3.15.2" +description = "A tool to automatically upgrade syntax for newer versions." 
+optional = true +python-versions = ">=3.8.1" +files = [ + {file = "pyupgrade-3.15.2-py2.py3-none-any.whl", hash = "sha256:ce309e0ff8ecb73f56a45f12570be84bbbde9540d13697cacb261a7f595fb1f5"}, + {file = "pyupgrade-3.15.2.tar.gz", hash = "sha256:c488d6896c546d25845712ef6402657123008d56c1063174e27aabe15bd6b4e5"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = 
"PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "readme-renderer" 
+version = "42.0" +description = "readme_renderer is a library for rendering readme descriptions for Warehouse" +optional = true +python-versions = ">=3.8" +files = [ + {file = "readme_renderer-42.0-py3-none-any.whl", hash = "sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d"}, + {file = "readme_renderer-42.0.tar.gz", hash = "sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1"}, +] + +[package.dependencies] +docutils = ">=0.13.1" +nh3 = ">=0.2.14" +Pygments = ">=2.5.1" + +[package.extras] +md = ["cmarkgfm (>=0.8.0)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = true +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rfc3986" +version = "2.0.0" +description = "Validating URI References per RFC 3986" +optional = true +python-versions = ">=3.7" +files = [ + {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, + {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, +] + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = true +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service 
API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "segno" +version = "1.4.1" +description = "QR Code and Micro QR Code generator for Python 2 and Python 3" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "segno-1.4.1-py2.py3-none-any.whl", hash = "sha256:79d1d7b9c893243411acd031682e0ece007fbd632885c6c650186871be572111"}, + {file = "segno-1.4.1.tar.gz", hash = "sha256:b8e90823b7ab5249044d22f022291bb06e112104779d6339baf0997fad656c9a"}, +] + +[[package]] +name = "setproctitle" +version = "1.3.3" +description = "A Python module to customize the process title" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"}, + {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"}, + {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"}, + {file = 
"setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"}, + {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"}, + {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"}, + {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"}, + {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"}, + {file = "setproctitle-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450"}, + {file = "setproctitle-1.3.3-cp38-cp38-win32.whl", hash = "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2"}, + {file = 
"setproctitle-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081"}, + {file = "setproctitle-1.3.3-cp39-cp39-win32.whl", hash = "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3"}, + {file = "setproctitle-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"}, + {file = 
"setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"}, + {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"}, +] + +[package.extras] +test = ["pytest"] + +[[package]] +name = "setuptools" +version = "70.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "tokenize-rt" +version = "5.2.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +optional = true +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"}, + {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.1" +description = "Style preserving TOML library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, +] + +[[package]] +name = "twine" +version = "5.1.0" +description = "Collection of utilities for publishing packages on PyPI" +optional = true +python-versions = ">=3.8" +files = [ + {file = "twine-5.1.0-py3-none-any.whl", hash = "sha256:fe1d814395bfe50cfbe27783cb74efe93abeac3f66deaeb6c8390e4e92bacb43"}, + {file = "twine-5.1.0.tar.gz", hash = "sha256:4d74770c88c4fcaf8134d2a6a9d863e40f08255ff7d8e2acb3cbbd57d25f6e9d"}, +] + +[package.dependencies] +importlib-metadata = ">=3.6" +keyring = ">=15.1" +pkginfo = ">=1.8.1" +readme-renderer = ">=35.0" +requests = ">=2.20" +requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" +rfc3986 = ">=1.4.0" +rich = ">=12.0.0" +urllib3 = ">=1.26.0" + +[[package]] +name = "types-aiofiles" +version = "23.2.0.20240311" +description = "Typing stubs for aiofiles" +optional = true +python-versions = ">=3.8" +files = [ + {file = "types-aiofiles-23.2.0.20240311.tar.gz", hash = "sha256:208e6b090de732739ef74ab8f133c954479c8e77e614f276f9e475a0cc986430"}, + {file = "types_aiofiles-23.2.0.20240311-py3-none-any.whl", hash = "sha256:ed10a8002d88c94220597b77304cf1a1d8cf489c7143fc3ffa2c96488b20fec7"}, +] + +[[package]] +name = "types-cryptography" +version = "3.3.23.2" +description = "Typing stubs for cryptography" +optional = true +python-versions = "*" +files = [ + {file = "types-cryptography-3.3.23.2.tar.gz", hash = "sha256:09cc53f273dd4d8c29fa7ad11fefd9b734126d467960162397bc5e3e604dea75"}, + {file = "types_cryptography-3.3.23.2-py3-none-any.whl", hash = "sha256:b965d548f148f8e87f353ccf2b7bd92719fdf6c845ff7cedf2abb393a0643e4f"}, +] + +[[package]] 
+name = "types-pyyaml" +version = "6.0.12.20240311" +description = "Typing stubs for PyYAML" +optional = true +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, +] + +[[package]] +name = "types-setuptools" +version = "70.0.0.20240524" +description = "Typing stubs for setuptools" +optional = true +python-versions = ">=3.8" +files = [ + {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"}, + {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "urllib3" +version = "1.26.19" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.24.5" +description = "Virtual Python Environment builder" +optional = true +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, + {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchdog" +version = "4.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wheel" +version = "0.41.2" +description = "A built-package format for Python" +optional = true +python-versions = ">=3.7" +files = [ + {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"}, + {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash 
= "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "zstd" +version = "1.5.5.1" +description = "ZSTD Bindings for Python" +optional = false +python-versions = "*" +files = [ + {file = "zstd-1.5.5.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = 
"sha256:555779789bc75cd05089c3ba857f45a0a8c4b87d45e5ced02fec77fa8719237a"}, + {file = "zstd-1.5.5.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:86496bd4830cdb7b4b05a9ce6ce2baee87d327ff90845da4ee308452bfbbed4e"}, + {file = "zstd-1.5.5.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:b487c2e67ed42a4e0d47997d209f4456b01b334023083ef61873f79577c84c62"}, + {file = "zstd-1.5.5.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:45ccd45a5b681088fca1a863ca9236ded5112b8011f1d5bf69e908f5eb32023a"}, + {file = "zstd-1.5.5.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8403fe84207d8b0c7b17bca6c4caad431ac765b1b9b626ad9fae4bb93a64a9d8"}, + {file = "zstd-1.5.5.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:0ab979c6357b8927f0c025ea2f72f25e15d03ce17a8a6c1789e2d5b108bf39ae"}, + {file = "zstd-1.5.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:98cbee6c1b2fe85f02fd475d885f98363c63bc64eebc249d7eb7469a0ff70283"}, + {file = "zstd-1.5.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9962714b89641301029f3832bdf07c20f60b9e64e39e8d7b6253451a82b54f5c"}, + {file = "zstd-1.5.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f59cc92d71537f8082306f75aa403ddb4a4a1069a39f104525673110e4d23f7"}, + {file = "zstd-1.5.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:569f13d0c926ddafceebce8ac73baddfc2bd9cbbbbc922b6b3073338cc43dae6"}, + {file = "zstd-1.5.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba530c44f252016acc6ef906d7d2070c1ad0cfe835c498fdcd37493e4772ac6e"}, + {file = "zstd-1.5.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ee3496ed8fff3add6c6e658b207f18d96474c3db0c28ab7a69623380b1a0a8c"}, + {file = "zstd-1.5.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:530d69bea2791cde8afa7fe988f3a37c3ba37015f6a1d5593c0500f089f3090e"}, + {file = "zstd-1.5.5.1-cp310-cp310-win32.whl", hash = "sha256:cf179e51f447b6a7ff47e449fcb98fb5fe15aedcc90401697cf7c93dd6e4434e"}, + {file = "zstd-1.5.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:5f5e6e0805d710d7509c8d175a467eb89c631a4142b1a630ceeb8e3e3138d152"}, + {file = "zstd-1.5.5.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:022f935a8666e08f0fff6204938a84d9fe4fcd8235a205787275933a07a164fb"}, + {file = "zstd-1.5.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3d15a2d18dac8bcafdde52fdf5d40ecae1f73b7de19b171f42339d2e51346d0"}, + {file = "zstd-1.5.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45b9c67989f50ba63ffa0c50c9eaa037c2d14abacb0813e838ad705135245b4b"}, + {file = "zstd-1.5.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97da6a842ba7e4acf8bba7c596057143ee39b3c4a467196c2096d460e44accd6"}, + {file = "zstd-1.5.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dafd492fb8ee4ae04c81ab00f5f137860e7071f611335dd4cdb1c38bd8f11bc"}, + {file = "zstd-1.5.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9ee83e0bcbfd776200b026b3b9e86c6c86b8f414749f58d87c85dcf456b27066"}, + {file = "zstd-1.5.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ae2fd4bc8ea772a7b5f1acd1cac9e34bb9cd8fcde191f170092fdeea779a3a12"}, + {file = "zstd-1.5.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:edea52a0109f48fd46f4763689d3d356dcafd20ddf6789c559a1bd2e62b40a32"}, + {file = "zstd-1.5.5.1-cp311-cp311-win32.whl", hash = 
"sha256:88410481209520298ec4430e0d1d57e004c45e0b27c3035674fb182ccd2d8b7b"}, + {file = "zstd-1.5.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:dce18aaefbacf8b133367be86beec670baf68c0420bfcca49be08dbdbf933db6"}, + {file = "zstd-1.5.5.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:634dc632f7cf87e95dabf74dcf682e3507bd5cb9dd1bcdb81f92a6521aab0bd2"}, + {file = "zstd-1.5.5.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:608414eb75ead573891d97a1e529848b8f31749d21a440e80838548a19d8c0e6"}, + {file = "zstd-1.5.5.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:384128f7a731e3f45da49976591cec03fc4079e70653df10d9ea43a1d3b49d50"}, + {file = "zstd-1.5.5.1-cp35-cp35m-win32.whl", hash = "sha256:4bce254174ef05cea01021d67e18489d5d08db1168e758b62ecee121572a52a9"}, + {file = "zstd-1.5.5.1-cp35-cp35m-win_amd64.whl", hash = "sha256:3f0ff81232b49d7eb4f4d9e6f92443c9d242c139ad98ffedac0e889568f900ce"}, + {file = "zstd-1.5.5.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a871df41b801a260cc849c2c76f300ebb9d286c4b7a1fd6ce45fe0c91340b767"}, + {file = "zstd-1.5.5.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a53860dbfbea281eb690ce09cae28967cf1df8e6d7560e4a8bf5b9fcb258147"}, + {file = "zstd-1.5.5.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a37cbc0580fdfd66c8b3ec65f9af00a4a34e9781b54dfb89f04d301dc375c90a"}, + {file = "zstd-1.5.5.1-cp36-cp36m-win32.whl", hash = "sha256:5531b683539ae1f7b2ad23dacee8a73e5d7eaa6702ea8df5a24bd3318647dee1"}, + {file = "zstd-1.5.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eeaff418269b41eee8c7971fbba9d32d07d3f6aa26f962a72aff725071096a1b"}, + {file = "zstd-1.5.5.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:8bd6a9050de8bbe844447348372ca17d01bc05207619f6a5d448567d111b5cd9"}, + {file = "zstd-1.5.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2ece3d20ef357370584f304407fbd1e4ff9c231209320e08a889b8e3725d56e"}, + {file = "zstd-1.5.5.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687f9e03dc9f9b8803840425bb23bf6bc700888b4860afcf43c4f238102752d2"}, + {file = "zstd-1.5.5.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a649daac9c8f1b37d29f2b3d0a43f134061659b54877fe4b0da6df2965dc91f"}, + {file = "zstd-1.5.5.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bddc7e3c3ce31c01fe1edaa7c03c0b9e71eadf4ce1609746d32f86d95a0449e6"}, + {file = "zstd-1.5.5.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:12bf8e04add8bb84f9fe9117f3de6d9394eade6a5a82fe4d6bd95914fc6ef423"}, + {file = "zstd-1.5.5.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e6a15fa4d2e65c5902ab2a4e41279ac126cb371ce6c3c75ad5789bb20dd1f54"}, + {file = "zstd-1.5.5.1-cp37-cp37m-win32.whl", hash = "sha256:a1c269243a4321beb948635b544ccbe6390846358ace620fd000ab7099011d9c"}, + {file = "zstd-1.5.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:91366e36773241cb4b049a32f4495d33dd274df1eea5b55396f5f3984a3de22e"}, + {file = "zstd-1.5.5.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:d3ce2cb310690994274d133ea7f269dd4b81799fdbce158690556209723d7d4e"}, + {file = "zstd-1.5.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e0c87bfbfa9d852f79c90bcd7426c3ba46cf3285e6984013636d4fc854ba9230"}, + {file = "zstd-1.5.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce6d829d515f272fddb3a87e1a5f32cc0f1a7b0cba24d360c89f4a165b74b"}, + {file = "zstd-1.5.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e05f81f346213b23ed1b12d84fc1f72e65eacd8978e1e88facf185c82bd3d053"}, + {file = "zstd-1.5.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43ec66c4c3a76351c672c6ef9f0ff3412fca9ede0a56d18dddaf6418a93faef8"}, + {file = "zstd-1.5.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:58e554e91e0d49f4f2b2df390cdd0f64aa9b6fd5f4dcb208c094bfd079b30f3a"}, + {file = "zstd-1.5.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:883c6d3b6f5574e1765ca97f4b6a41b69094a41be56175552faebc0e0e43b65e"}, + {file = "zstd-1.5.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d52b6932cab5419c434bccfea3e5640e755369fc9eeb51e3d17e15bf8e8cb103"}, + {file = "zstd-1.5.5.1-cp38-cp38-win32.whl", hash = "sha256:dcaf44270ec88552e969be4dd3359b34aa3065663ccd8168a257c78f150a356c"}, + {file = "zstd-1.5.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:627f12cb7035723c8f3d8d4cefcad6d950ed9cba33fd3eb46bae04ccab479234"}, + {file = "zstd-1.5.5.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:c0dab132c1a5a7cc838a7c3e4e380ad153b9d7bd1fadafabf6cfeb780b916201"}, + {file = "zstd-1.5.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d4ab0a5dd9a41d3b083304beee7ada40ee36431acbeb75132032f4fe5cf0490a"}, + {file = "zstd-1.5.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6e38f496d287020658c6b4cdb5e815ecc6998889bd0f1f9ab0825f2e3d74ef"}, + {file = "zstd-1.5.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0096c8ee0ed4bfe406bc961019f55552109e19771bfd3eb32d2af56ea27085c"}, + {file = "zstd-1.5.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a0f1527728c50b6aa8f04b47a07580f0ae13cfc6c6d9c96bb0bdf5259487559"}, + {file = "zstd-1.5.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a64e420c904063c5c3de53c00ec0993ebc0a48cebbef97dc6c768562c5abab5"}, + {file = "zstd-1.5.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03444e357b7632c64480a81ce7095242dab9d7f8aed317326563ef6c663263eb"}, + {file = "zstd-1.5.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:88b9a10f80d2b87bf8cc1a1fc20a815ed92b5eefdc15cbe8062021f0b5a26a10"}, + {file = "zstd-1.5.5.1-cp39-cp39-win32.whl", hash = "sha256:c91cc1606eb8b3a6fed11faaef4c6e55f1133d70cf0db0c829a2cf9c2ac1dfd9"}, + {file = "zstd-1.5.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:f462e2ebf26dcbfc2c8dddd6b5c56859683f0b77edb8f268e637f7d390a58f74"}, + {file = "zstd-1.5.5.1-pp27-pypy_73-macosx_10_14_x86_64.whl", hash = "sha256:c63f916732e3e309e49ec95e7a0af5d37ff1321f3df2aac10e507bd2b56fceda"}, + {file = "zstd-1.5.5.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:50d4850d758bb033df50722cc13ed913b2afcd5385250be4f3ffb79a26b319c3"}, + {file = "zstd-1.5.5.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:0412d666515e78a91ada7e2d78e9dd6b25ddda1b41623b145b99653275c7f3ce"}, + {file = "zstd-1.5.5.1-pp36-pypy36_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0ea91f74869a3cdcb2dde08f8f30ee3da72782c5d1737afed9c703232815864e"}, + {file = "zstd-1.5.5.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:477548897dc2b8b595af7bec5f0f55dcba8e9a282335f687cc663b52b171357b"}, + {file = "zstd-1.5.5.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c518938b57a56001ee04dcf79a432152f5bd431416f3b22819ba959bc6054d89"}, + {file = "zstd-1.5.5.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:894a8fe0228d5e24dc286a8d98eb0ce2883f8e2e57f3b7e7619ebdb67967120a"}, + {file = 
"zstd-1.5.5.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:42ec0a4ae9bedd9909fa4f580f3c800469da1b631faeaa94f204e1b66c767fa2"}, + {file = "zstd-1.5.5.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d56dedaa04ab8ecc23492972b12e0bf8529f64c9bceb28c11f43c2369c9768b3"}, + {file = "zstd-1.5.5.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5b060770d796e4c01f5848b345c3cea8a177ab4e7cd95a1963a355042d429e1"}, + {file = "zstd-1.5.5.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea04805ef6e1cb93d6e5d6bbc7a03bc75a5c733fd352d5aaa81109986fdf1ef"}, + {file = "zstd-1.5.5.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:405c28a35756e57a434bbd7ed29dc5e6490cd2fc2118cbf78b60eaebd134f5e9"}, + {file = "zstd-1.5.5.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:c42e630443b01a891277426365a51a2aa630b059ce675992c70c1928d30eccb4"}, + {file = "zstd-1.5.5.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1520d23f24f26cdfbcdb4dc86947446b8f694838bfce728d7fc4b3492397357c"}, + {file = "zstd-1.5.5.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4730737f63cf802321743ded6acc85e747e7f5587c5ba2e51a760bf009f7de"}, + {file = "zstd-1.5.5.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f8c014395e89ad7f67ffe873c0fa1d8e9b4dea8b1801d24e8d9ccd8259858d"}, + {file = "zstd-1.5.5.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5d9ba4f6af0945809bfa3387c6a1208a22937a876521b9ec347e7183d623311b"}, + {file = "zstd-1.5.5.1-pp39-pypy39_pp73-macosx_10_14_x86_64.whl", hash = "sha256:04dfd9f46b0b0b1bc413884fe028b726febcb726d4f66e3cf8afc00c2d9026bf"}, + {file = "zstd-1.5.5.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af52436a2eb5caa925d95461973984cb34d472a963b6be1c0a9f2dfbafad096f"}, + {file = "zstd-1.5.5.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610928b888a2e7ae9d2018ffa814859d47ec4ba75f89a1188ab4eb9232636ee5"}, + {file = "zstd-1.5.5.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee3c9feea99c7f4ff43129a885da056b5aa0cde3f7876bf6397bfb9433f44352"}, + {file = "zstd-1.5.5.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ac9768eeb3c6b530db93de2fec9b363776075dc8a00ee4049612ba5397ca8e"}, + {file = "zstd-1.5.5.1.tar.gz", hash = "sha256:1ef980abf0e1e072b028d2d76ef95b476632651c96225cf30b619c6eef625672"}, +] + +[[package]] +name = "zstd" +version = "1.5.5.1" +description = "ZSTD Bindings for Python" +optional = false +python-versions = "*" +files = [ + {file = "zstd-1.5.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5cd20afab8d13c52d2b2219bf18cc765eae87b8219343bce20647007890adab"}, + {file = "zstd-1.5.5.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0f467ab9b57ab8b4b874e6974d38b802f20406803bb7ec9308df923553cd48f7"}, + {file = "zstd-1.5.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ca7df4319ef56b50b441ee6f6427e94672db811ad890bbb487d42bb0a6eebeb"}, + {file = "zstd-1.5.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:06d91d45b912417e83793a78a3f6668be631a1744a7841ecde4dbfeee309f5ca"}, + {file = "zstd-1.5.5.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:408d3f56bbf0f091a2bab33a3dee62ac9bafe94bb96940b5e4611e43dcf0ee68"}, + {file = "zstd-1.5.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23f6fb9a631f7e7934cccdfbb69e06a47e1a5e64d69b09a7a2a5cf6e1db5014d"}, + {file = "zstd-1.5.5.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:02ecf7d9c41ae9a5685a7fdbf2aebbfb185125c07622819512fed41459233d51"}, + {file = "zstd-1.5.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3d13647ff05da2f8a9c1067b6e4182102fc4b6547c49ecdd3031e7ae2cadc1a"}, + {file = "zstd-1.5.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f0c81d4241226eab831a500120ddaf9a35dd59593eeb48ed7879e2c722c24c"}, + {file = "zstd-1.5.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:62d2ab79a32bb2d09c74e550636cf4639c1ff3760625f03dccd30e2606b24e66"}, + {file = "zstd-1.5.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec16bc70e3db77a88129c001fb62ce0956a1a3321e0ea4ee389c78d7f616ac50"}, + {file = "zstd-1.5.5.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:b8e7f5ee15e7d5d9a5deebc49f62316c901000d8485e68c5f5d602312bfba9ae"}, + {file = "zstd-1.5.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55bcbcca3a342e8298fd23e58f300b13a33e65667e837cb34dd16c8126cdf77a"}, + {file = "zstd-1.5.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3f06af07fc8976114bb233c426c499b3ac219a6545d163b3c1a4989c40288"}, + {file = "zstd-1.5.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:ab96e69cd4a70c5a4d3c613e85d6492817c35a9ba3921ee86f9d26a788c1522b"}, + {file = "zstd-1.5.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1b111beaf6a4d98b6382e0891287a0d61ae44c3ec819b37f2256a482cc2ccc08"}, + {file = "zstd-1.5.5.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4a434d83bcfa8a0d843451bc073fe60bae9f710dce796c9dba716c66df49c311"}, + {file = "zstd-1.5.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccbafaeda62b98f3d3c70b5e686c6c539c0ea76cab88a9817f1cf3842d4272d"}, + {file = "zstd-1.5.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:59afd7c07ff24dfde876fed3cce5dee64d246e54b394ca6c698b2ec15b0f2454"}, + {file = "zstd-1.5.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:eb17b3401e3fa2c16ecfee4b00cca82fbc71ffd6c64c97619605b2779bc7f5d0"}, + {file = "zstd-1.5.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8612bb288facb0e24d171f7010f2ea2159d8bd7d8243667f707994f398b6d26d"}, + {file = "zstd-1.5.5.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:8cee6c496fe49a27f4a641c75814681ee932201680eb4b07c88afde1b730b633"}, + {file = "zstd-1.5.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b629e950bd0bcd169149af8c9c312d8091a922c07adeef0cd6e9fb06dc11216a"}, + {file = "zstd-1.5.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a968e443d735ff4716c18fda91b88d30fee7419c381d26ab2f92e779e69c22"}, + {file = "zstd-1.5.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e4a460b169110b16d39eb05249d36c9b09c9d1b4831e72ead13a484b5404029"}, +] + +[package.source] +type = "legacy" +url = "https://pypi.chia.net/simple" +reference = "chia" + +[extras] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "twine", 
"types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] +legacy-keyring = ["keyrings.cryptfile"] +upnp = ["miniupnpc"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.10, <3.13" +content-hash = "07908ee12a9c7de160f147399807b1e7e2f520678c2a9081ab741c29ac6e757c" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 000000000000..ab1033bd3722 --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pyproject.toml b/pyproject.toml index 7f0da13cf5aa..6f95f16b63be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,153 @@ -[build-system] -requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=4.1.2"] -build-backend = "setuptools.build_meta" +[tool.poetry] +name = "chia-blockchain" +# see [tool.poetry-dynamic-versioning] +version = "0.0.0" +description = "Chia blockchain full node, farmer, timelord, and wallet." +authors = ["Mariano Sorgente "] +license = "Apache License" +readme = "README.md" +keywords= ["chia", "blockchain", "node"] +homepage = "https://chia.net/" +packages = [{ include = "chia"}, { include = "mozilla-ca/cacert.pem" }] + +[tool.poetry.scripts] +chia = "chia.cmds.chia:main" +chia_daemon = "chia.daemon.server:main" +chia_wallet = "chia.server.start_wallet:main" +chia_full_node = "chia.server.start_full_node:main" +chia_harvester = "chia.server.start_harvester:main" +chia_farmer = "chia.server.start_farmer:main" +chia_introducer = "chia.server.start_introducer:main" +chia_crawler = "chia.seeder.start_crawler:main" +chia_seeder = "chia.seeder.dns_server:main" +chia_timelord = "chia.server.start_timelord:main" +chia_timelord_launcher = "chia.timelord.timelord_launcher:main" +chia_full_node_simulator = "chia.simulator.start_simulator:main" +chia_data_layer = "chia.server.start_data_layer:main" +chia_data_layer_http = "chia.data_layer.data_layer_server:main" +chia_data_layer_s3_plugin = "chia.data_layer.s3_plugin_service:run_server" + +[[tool.poetry.source]] +name = "chia" +url = "https://pypi.chia.net/simple/" +priority = "supplemental" + +[tool.poetry.urls] +"Source" = "https://github.com/Chia-Network/chia-blockchain/" +"Changelog" = "https://github.com/Chia-Network/chia-blockchain/blob/main/CHANGELOG.md" + +[tool.poetry.dependencies] +python = ">=3.8.10, <3.13" +aiofiles = "23.2.1" # Async IO for files +aiohttp = "3.9.4" # HTTP server for full node rpc +aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks +anyio = "4.3.0" +bitstring = "4.1.4" # Binary data management library +boto3 = "1.34.114" # AWS S3 for Data Layer S3 plugin +chiabip158 = "1.5.1" # bip158-style wallet filters +chiapos = "2.0.4" # proof of space +chia_rs = "0.10.0" +chiavdf = "1.1.4" # timelord and vdf verification +click = "8.1.3" # For the CLI +clvm = "0.9.10" +clvm_tools = "0.4.9" # Currying Program.to other conveniences +clvm_tools_rs = "0.1.40" # Rust implementation of clvm_tools' compiler +colorama = "0.4.6" # Colorizes terminal output +colorlog = "6.8.2" # Adds color to logs +concurrent_log_handler = "0.9.25" # Concurrently log and rotate logs +cryptography = "42.0.5" # Python cryptography library for TLS - keyring conflict +dnslib = "0.9.24" # dns lib +dnspython = "2.6.1" # Query DNS seeds +filelock = "3.14.0" # For reading and writing config multiprocess and multithread safely (non-reentrant locks) +keyring = "25.1.0" # Store keys in MacOS Keychain, Windows Credential Locker +packaging = "24.0" +pip = "24.0" +psutil = [ + {version="5.9.4", markers="platform_machine!='aarch64'"}, + 
{version="5.9.4", markers="platform_machine=='aarch64'", source="chia"}, +] +pyyaml = "6.0.1" # Used for config file format +setproctitle = "1.3.3" # Gives the chia processes readable names +setuptools = "70.0.0" +sortedcontainers = "2.4.0" # For maintaining sorted mempools +typing-extensions = "4.11.0" # typing backports like Protocol and TypedDict +watchdog = "4.0.0" # Filesystem event watching - watches keyring.yaml +zstd = [ + {version="1.5.5.1", python = "<3.12"}, + {version="1.5.5.1", python = "3.12", source="chia"}, +] +importlib-resources = "6.4.0" +hsms = "0.3.1" +aiohttp_cors = { version = "0.7.0", optional = true } +black = { version = "24.4.2", optional = true } +build = { version = "1.2.1", optional = true } +coverage = { version = "7.5.3", optional = true } +diff-cover = { version = "9.0.0", optional = true } +flake8 = { version = "7.0.0", optional = true } +isort = { version = "5.13.2", optional = true } +# TODO: but... keyrings_cryptfile goes 15 minutes without locking while this does in 75 seconds +"keyrings.cryptfile" = { version = "1.3.9", optional = true } +mypy = { version = "1.10.0", optional = true } +pre-commit = [ { version = "3.5.0", python = "<3.9", optional = true }, { version = "3.7.1", python = ">=3.9", optional = true } ] +py3createtorrent = { version = "1.1.0", optional = true } +pyinstaller = { version = "6.7.0", optional = true } +pylint = { version = "3.2.2", optional = true } +pytest = { version = "8.1.1", optional = true } +pytest-cov = { version = "5.0.0", optional = true } +pytest-mock = { version = "3.14.0", optional = true } +pytest-monitor = { version = "1.6.6", platform = "linux", optional = true } +pytest-xdist = { version = "3.6.1", optional = true } +twine = { version = "5.1.0", optional = true } +types-aiofiles = { version = "23.2.0.20240311", optional = true } +types-cryptography = { version = "3.3.23.2", optional = true } +types-pyyaml = { version = "6.0.12.20240311", optional = true } +types-setuptools = { version = "70.0.0.20240524", optional = true } +lxml = { version = "5.2.2", optional = true } +miniupnpc = { version = "2.2.2", source = "chia", optional = true } +pyupgrade = { version = "3.15.2", optional = true } +# big-o = {version = "0.11.0", optional = true} +# numpy = [ +# {version="1.24.4", python = "<3.9", optional = true}, +# {version="1.26.4", python = ">=3.9", optional = true}] -[tool.setuptools_scm] -fallback_version = "unknown-no-.git-directory" -local_scheme = "no-local-version" + +[tool.poetry.extras] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "mypy", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "twine", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools", "pyupgrade", "lxml"] +upnp = ["miniupnpc"] +legacy_keyring = ["keyrings.cryptfile"] + +[tool.poetry-dynamic-versioning] +enable = true +vcs = "git" +pattern = "default-unprefixed" +format-jinja = """ + {%- if distance == 0 -%} + {%- if stage is none -%} + {{ base }} + {%- else -%} + {{ base }}-{{stage}}{{revision}} + {%- endif -%} + {%- else -%} + {%- if stage is none -%} + {{ base }}-dev{{ distance }} + {%- else -%} + {{ base }}-{{stage}}{{revision}}.dev{{ distance }} + {%- endif -%} + {%- endif -%} +""" +#format-jinja = """ +# {%- if distance == 0 -%} +# {{ serialize_pep440(base, stage, revision) }} +# {%- else -%} +# {{ serialize_pep440(base, stage, revision, dev=distance) }} +# {%- endif -%} +#""" +style = "semver" 
+bump = true + +[build-system] +requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] +build-backend = "poetry_dynamic_versioning.backend" [tool.black] line-length = 120 diff --git a/setup-poetry.sh b/setup-poetry.sh new file mode 100755 index 000000000000..38faa07b5e74 --- /dev/null +++ b/setup-poetry.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -o errexit + +USAGE_TEXT="\ +Usage: $0 [-ch] + + -c command for Python + -h display this help and exit +" + +usage() { + echo "${USAGE_TEXT}" +} + +PYTHON_COMMAND=python + +while getopts c:h flag; do + case "${flag}" in + c) PYTHON_COMMAND=${OPTARG} ;; + h) + usage + exit 0 + ;; + *) + echo + usage + exit 1 + ;; + esac +done + +$PYTHON_COMMAND -m venv .penv +.penv/bin/python -m pip install --upgrade pip setuptools wheel +# TODO: maybe make our own zipapp/shiv/pex of poetry and download that? +.penv/bin/python -m pip install poetry diff --git a/setup.py b/setup.py deleted file mode 100644 index ec095861110f..000000000000 --- a/setup.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import annotations - -import os -import sys - -from setuptools import find_packages, setup - -dependencies = [ - "aiofiles==23.2.1", # Async IO for files - "anyio==4.3.0", - "boto3==1.34.114", # AWS S3 for DL s3 plugin - "chiavdf==1.1.4", # timelord and vdf verification - "chiabip158==1.5.1", # bip158-style wallet filters - "chiapos==2.0.4", # proof of space - "clvm==0.9.10", - "clvm_tools==0.4.9", # Currying, Program.to, other conveniences - "chia_rs==0.10.0", - "clvm-tools-rs==0.1.40", # Rust implementation of clvm_tools' compiler - "aiohttp==3.9.4", # HTTP server for full node rpc - "aiosqlite==0.20.0", # asyncio wrapper for sqlite, to store blocks - "bitstring==4.1.4", # Binary data management library - "colorama==0.4.6", # Colorizes terminal output - "colorlog==6.8.2", # Adds color to logs - "concurrent-log-handler==0.9.25", # Concurrently log and rotate logs - "cryptography==42.0.5", # Python cryptography library for TLS - keyring conflict - "filelock==3.14.0", # For reading and writing config multiprocess and multithread safely (non-reentrant locks) - "importlib-resources==6.4.0", - "keyring==25.1.0", # Store keys in MacOS Keychain, Windows Credential Locker - "PyYAML==6.0.1", # Used for config file format - "setproctitle==1.3.3", # Gives the chia processes readable names - "sortedcontainers==2.4.0", # For maintaining sorted mempools - "click==8.1.3", # For the CLI - "dnspython==2.6.1", # Query DNS seeds - "watchdog==4.0.0", # Filesystem event watching - watches keyring.yaml - "dnslib==0.9.24", # dns lib - "typing-extensions==4.11.0", # typing backports like Protocol and TypedDict - "zstd==1.5.5.1", - "packaging==24.0", - "psutil==5.9.4", - "hsms==0.3.1", -] - -upnp_dependencies = [ - "miniupnpc==2.2.2", # Allows users to open ports on their router -] - -dev_dependencies = [ - "build==1.2.1", - "coverage==7.5.3", - "diff-cover==9.0.0", - "pre-commit==3.5.0; python_version < '3.9'", - "pre-commit==3.7.1; python_version >= '3.9'", - "py3createtorrent==1.2.0", - "pylint==3.2.2", - "pytest==8.1.1", - "pytest-cov==5.0.0", - "pytest-mock==3.14.0", - "pytest-xdist==3.6.1", - "pyupgrade==3.15.2", - "twine==5.1.0", - "isort==5.13.2", - "flake8==7.0.0", - "mypy==1.10.0", - "black==24.4.2", - "lxml==5.2.2", - "aiohttp_cors==0.7.0", # For blackd - "pyinstaller==6.7.0", - "types-aiofiles==23.2.0.20240311", - "types-cryptography==3.3.23.2", - "types-pyyaml==6.0.12.20240311", - "types-setuptools==70.0.0.20240524", -] - -legacy_keyring_dependencies = [ - 
"keyrings.cryptfile==1.3.9", -] - -with open("README.md") as f: - long_description = f.read() - -kwargs = dict( - name="chia-blockchain", - author="Mariano Sorgente", - author_email="mariano@chia.net", - description="Chia blockchain full node, farmer, timelord, and wallet.", - url="https://chia.net/", - license="Apache License", - python_requires=">=3.8.1, <4", - keywords="chia blockchain node", - install_requires=dependencies, - extras_require={ - "dev": dev_dependencies, - "upnp": upnp_dependencies, - "legacy-keyring": legacy_keyring_dependencies, - }, - packages=find_packages(include=["build_scripts", "chia", "chia.*", "mozilla-ca"]), - entry_points={ - "console_scripts": [ - "chia = chia.cmds.chia:main", - "chia_daemon = chia.daemon.server:main", - "chia_wallet = chia.server.start_wallet:main", - "chia_full_node = chia.server.start_full_node:main", - "chia_harvester = chia.server.start_harvester:main", - "chia_farmer = chia.server.start_farmer:main", - "chia_introducer = chia.server.start_introducer:main", - "chia_crawler = chia.seeder.start_crawler:main", - "chia_seeder = chia.seeder.dns_server:main", - "chia_timelord = chia.server.start_timelord:main", - "chia_timelord_launcher = chia.timelord.timelord_launcher:main", - "chia_full_node_simulator = chia.simulator.start_simulator:main", - "chia_data_layer = chia.server.start_data_layer:main", - "chia_data_layer_http = chia.data_layer.data_layer_server:main", - "chia_data_layer_s3_plugin = chia.data_layer.s3_plugin_service:run_server", - ] - }, - package_data={ - "": ["*.clsp", "*.clsp.hex", "*.clvm", "*.clib", "py.typed"], - "chia._tests.cmds.wallet": ["test_offer.toffer"], - "chia._tests.farmer_harvester": ["*.json"], - "chia._tests.tools": ["*.json", "test-blockchain-db.sqlite"], - "chia._tests.util": ["bip39_test_vectors.json", "clvm_generator.bin", "protocol_messages_bytes-v*"], - "chia.util": ["initial-*.yaml", "english.txt"], - "chia.ssl": ["chia_ca.crt", "chia_ca.key", "dst_root_ca.pem"], - "mozilla-ca": ["cacert.pem"], - }, - long_description=long_description, - long_description_content_type="text/markdown", - zip_safe=False, - project_urls={ - "Source": "https://github.com/Chia-Network/chia-blockchain/", - "Changelog": "https://github.com/Chia-Network/chia-blockchain/blob/main/CHANGELOG.md", - }, -) - -if "setup_file" in sys.modules: - # include dev deps in regular deps when run in snyk - dependencies.extend(dev_dependencies) - -if len(os.environ.get("CHIA_SKIP_SETUP", "")) < 1: - setup(**kwargs) # type: ignore From 40c961e6c600cf5172640fd9d0d7a75ca1177a8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Jul 2024 13:08:11 -0700 Subject: [PATCH 53/77] build(deps): bump watchdog from 4.0.0 to 4.0.1 (#18153) Bumps [watchdog](https://github.com/gorakhargosh/watchdog) from 4.0.0 to 4.0.1. - [Release notes](https://github.com/gorakhargosh/watchdog/releases) - [Changelog](https://github.com/gorakhargosh/watchdog/blob/master/changelog.rst) - [Commits](https://github.com/gorakhargosh/watchdog/compare/v4.0.0...v4.0.1) --- updated-dependencies: - dependency-name: watchdog dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 65 ++++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 35 insertions(+), 32 deletions(-) diff --git a/poetry.lock b/poetry.lock index c0225ee7a6f4..65d848c442b9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3280,40 +3280,43 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "4.0.0" +version = "4.0.1" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = 
"sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = 
"watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, ] [package.extras] @@ -3594,4 +3597,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "07908ee12a9c7de160f147399807b1e7e2f520678c2a9081ab741c29ac6e757c" +content-hash = "9cebea6c1ee21f4a62532597cee535b2ae9f85ec12917992cee13315a86ec5fe" diff --git a/pyproject.toml b/pyproject.toml index 6f95f16b63be..bd489828f298 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,7 @@ setproctitle = "1.3.3" # Gives the chia processes readable names setuptools = "70.0.0" sortedcontainers = "2.4.0" # For maintaining sorted mempools typing-extensions = "4.11.0" # typing backports like Protocol and TypedDict -watchdog = "4.0.0" # Filesystem event watching - watches keyring.yaml +watchdog = "4.0.1" # Filesystem event watching - watches keyring.yaml zstd = [ {version="1.5.5.1", python = "<3.12"}, {version="1.5.5.1", python = "3.12", source="chia"}, From 9aed61d4c4ccf6536ecd8fd6512a672e8f29c166 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Jul 2024 15:34:41 -0700 Subject: [PATCH 54/77] Bump zipp 
from 3.17.0 to 3.19.1 (#18291) Bumps [zipp](https://github.com/jaraco/zipp) from 3.17.0 to 3.19.1. - [Release notes](https://github.com/jaraco/zipp/releases) - [Changelog](https://github.com/jaraco/zipp/blob/main/NEWS.rst) - [Commits](https://github.com/jaraco/zipp/compare/v3.17.0...v3.19.1) --- updated-dependencies: - dependency-name: zipp dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 65d848c442b9..44bb9ffa0604 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3441,18 +3441,18 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.19.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, + {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "zstd" From a72298f302f37d51e016bb603a628f20b8962c5e Mon Sep 17 00:00:00 2001 From: ChiaAutomation <85647627+ChiaAutomation@users.noreply.github.com> Date: Thu, 11 Jul 2024 13:20:46 -0500 Subject: [PATCH 55/77] Update Managed Files (#18290) Update dep-review --- .github/workflows/dependency-review.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 500c8abce12c..53a3bbdd86e2 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -21,5 +21,5 @@ jobs: - name: "Dependency Review" uses: actions/dependency-review-action@v4 with: - allow-dependencies-licenses: pkg:pypi/pylint + allow-dependencies-licenses: pkg:pypi/pylint, pkg:pypi/pyinstaller deny-licenses: AGPL-1.0-only, AGPL-1.0-or-later, AGPL-1.0-or-later, AGPL-3.0-or-later, GPL-1.0-only, GPL-1.0-or-later, GPL-2.0-only, GPL-2.0-or-later, GPL-3.0-only, GPL-3.0-or-later From 4e0ea3d40d0b61062918a1d6be0c9da4903fa117 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 11 Jul 2024 14:21:04 -0400 Subject: [PATCH 56/77] remove dependency on twine (#18253) * remove dependency on twine no longer used since https://github.com/Chia-Network/chia-blockchain/commit/34314c1abb36b1088898cd83b7809133b93eb834 * remove twine (from poetry 
this time) --- poetry.lock | 177 +------------------------------------------------ pyproject.toml | 1 - 2 files changed, 2 insertions(+), 176 deletions(-) diff --git a/poetry.lock b/poetry.lock index 44bb9ffa0604..088618ad719e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1270,17 +1270,6 @@ idna = ["idna (>=3.6)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] -[[package]] -name = "docutils" -version = "0.20.1" -description = "Docutils -- Python Documentation Utilities" -optional = true -python-versions = ">=3.7" -files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] - [[package]] name = "exceptiongroup" version = "1.1.3" @@ -1844,30 +1833,6 @@ files = [ [package.dependencies] altgraph = ">=0.17" -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = true -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - [[package]] name = "markupsafe" version = "2.1.3" @@ -1948,17 +1913,6 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = true -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - [[package]] name = "memory-profiler" version = "0.61.0" @@ -2195,31 +2149,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "nh3" -version = "0.2.14" -description = "Ammonia HTML sanitizer Python binding" -optional = true -python-versions = "*" -files = [ - {file = "nh3-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a"}, - {file = "nh3-0.2.14-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6"}, - {file = "nh3-0.2.14-cp37-abi3-win32.whl", hash = "sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873"}, - {file = "nh3-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e"}, - {file = "nh3-0.2.14.tar.gz", hash = "sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4"}, -] - [[package]] name = "nodeenv" version = "1.8.0" @@ -2278,20 +2207,6 @@ files = [ {file = "pip-24.0.tar.gz", hash = "sha256:ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2"}, ] -[[package]] -name = "pkginfo" -version = "1.9.6" -description = "Query metadata from sdists / bdists / installed packages." 
-optional = true -python-versions = ">=3.6" -files = [ - {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"}, - {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"}, -] - -[package.extras] -testing = ["pytest", "pytest-cov"] - [[package]] name = "platformdirs" version = "3.11.0" @@ -2837,25 +2752,6 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] -[[package]] -name = "readme-renderer" -version = "42.0" -description = "readme_renderer is a library for rendering readme descriptions for Warehouse" -optional = true -python-versions = ">=3.8" -files = [ - {file = "readme_renderer-42.0-py3-none-any.whl", hash = "sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d"}, - {file = "readme_renderer-42.0.tar.gz", hash = "sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1"}, -] - -[package.dependencies] -docutils = ">=0.13.1" -nh3 = ">=0.2.14" -Pygments = ">=2.5.1" - -[package.extras] -md = ["cmarkgfm (>=0.8.0)"] - [[package]] name = "requests" version = "2.32.3" @@ -2877,53 +2773,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "rfc3986" -version = "2.0.0" -description = "Validating URI References per RFC 3986" -optional = true -python-versions = ">=3.7" -files = [ - {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, - {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, -] - -[package.extras] -idna2008 = ["idna"] - -[[package]] -name = "rich" -version = "13.6.0" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = true -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - [[package]] name = "s3transfer" version = "0.10.1" @@ -3148,28 +2997,6 @@ files = [ {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] -[[package]] -name = "twine" -version = "5.1.0" -description = "Collection of utilities for publishing packages on PyPI" -optional = true -python-versions = ">=3.8" -files = [ - {file = "twine-5.1.0-py3-none-any.whl", hash = "sha256:fe1d814395bfe50cfbe27783cb74efe93abeac3f66deaeb6c8390e4e92bacb43"}, - 
{file = "twine-5.1.0.tar.gz", hash = "sha256:4d74770c88c4fcaf8134d2a6a9d863e40f08255ff7d8e2acb3cbbd57d25f6e9d"}, -] - -[package.dependencies] -importlib-metadata = ">=3.6" -keyring = ">=15.1" -pkginfo = ">=1.8.1" -readme-renderer = ">=35.0" -requests = ">=2.20" -requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" -rfc3986 = ">=1.4.0" -rich = ">=12.0.0" -urllib3 = ">=1.26.0" - [[package]] name = "types-aiofiles" version = "23.2.0.20240311" @@ -3590,11 +3417,11 @@ url = "https://pypi.chia.net/simple" reference = "chia" [extras] -dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "twine", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] legacy-keyring = ["keyrings.cryptfile"] upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "9cebea6c1ee21f4a62532597cee535b2ae9f85ec12917992cee13315a86ec5fe" +content-hash = "be1a0e30d25c49526bcd13c69e8fa4361710836ff2d5a166d1587c2a5dd33dc6" diff --git a/pyproject.toml b/pyproject.toml index bd489828f298..763ea9914591 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,6 @@ pytest-cov = { version = "5.0.0", optional = true } pytest-mock = { version = "3.14.0", optional = true } pytest-monitor = { version = "1.6.6", platform = "linux", optional = true } pytest-xdist = { version = "3.6.1", optional = true } -twine = { version = "5.1.0", optional = true } types-aiofiles = { version = "23.2.0.20240311", optional = true } types-cryptography = { version = "3.3.23.2", optional = true } types-pyyaml = { version = "6.0.12.20240311", optional = true } From f41a49479a1919c6a6cb7c26f05a63e2b5e0f16a Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 11 Jul 2024 14:21:20 -0400 Subject: [PATCH 57/77] remove unclosed client session warning ignore (#18214) --- pytest.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 78bb47396431..952f99343ebc 100644 --- a/pytest.ini +++ b/pytest.ini @@ -16,7 +16,6 @@ markers = testpaths = chia/_tests/ filterwarnings = error - ignore:Implicitly cleaning up:ResourceWarning ignore:unclosed Date: Thu, 11 Jul 2024 11:23:15 -0700 Subject: [PATCH 58/77] [CHIA-712] Fix CATWallet pending_change calculation (#18126) * Add the concept of 'action scopes' * Add `WalletActionScope` * Fix CATWallet pending_change calculation * Add the concept of 'action scopes' * pylint and test coverage * add try/finally * add try/except * Undo giving a variable a name * Fix CRCAT test * Fix trade tests * Fix cat test --- .../wallet/cat_wallet/test_cat_wallet.py | 4 ++-- chia/_tests/wallet/cat_wallet/test_trades.py | 21 +++++++++++++++++++ .../_tests/wallet/vc_wallet/test_vc_wallet.py | 6 ++++++ chia/wallet/cat_wallet/cat_wallet.py | 9 ++++++-- 4 files changed, 36 insertions(+), 4 deletions(-) diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index c81506dc3a2c..2cf918ef040b 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ 
b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -208,7 +208,7 @@ async def test_cat_spend(wallet_environments: WalletTestFramework) -> None: "confirmed_wallet_balance": 0, "unconfirmed_wallet_balance": 100, "spendable_balance": 0, - "pending_change": 0, + "pending_change": 100, # A little weird but technically correct "max_send_amount": 0, "unspent_coin_count": 0, "pending_coin_removal_count": 1, # The ephemeral eve spend @@ -226,7 +226,7 @@ async def test_cat_spend(wallet_environments: WalletTestFramework) -> None: "cat": { "confirmed_wallet_balance": 100, "spendable_balance": 100, - "pending_change": 0, + "pending_change": -100, "max_send_amount": 100, "unspent_coin_count": 1, "pending_coin_removal_count": -1, diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index 132ff086adc2..d0af2f49eda7 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -515,6 +515,7 @@ async def test_cat_trades( "unconfirmed_wallet_balance": 0, "spendable_balance": 0, "max_send_amount": 0, + "pending_change": 0, "unspent_coin_count": 0, } if credential_restricted @@ -550,6 +551,7 @@ async def test_cat_trades( "new cat": { "unconfirmed_wallet_balance": -2, "pending_coin_removal_count": 1, + "pending_change": 98, "<=#spendable_balance": -2, "<=#max_send_amount": -2, }, @@ -576,6 +578,7 @@ async def test_cat_trades( "new cat": { "confirmed_wallet_balance": -2, "pending_coin_removal_count": -1, + "pending_change": -98, ">#spendable_balance": 0, ">#max_send_amount": 0, }, @@ -607,6 +610,7 @@ async def test_cat_trades( "new cat": { "unconfirmed_wallet_balance": 2, "pending_coin_removal_count": 1, + "pending_change": 2, # This is a little weird but fits the current definition }, "vc": { "pending_coin_removal_count": 1, @@ -622,6 +626,7 @@ async def test_cat_trades( "confirmed_wallet_balance": 2, "spendable_balance": 2, "max_send_amount": 2, + "pending_change": -2, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, @@ -889,6 +894,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "new cat": { "unconfirmed_wallet_balance": -6, "<=#spendable_balance": -6, + "pending_change": 92, "<=#max_send_amount": -6, "pending_coin_removal_count": 1, }, @@ -911,6 +917,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num }, "new cat": { "confirmed_wallet_balance": -6, + "pending_change": -92, ">#spendable_balance": 0, ">#max_send_amount": 0, "pending_coin_removal_count": -1, @@ -946,6 +953,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "new cat": { "unconfirmed_wallet_balance": 6, "pending_coin_removal_count": 1, + "pending_change": 6, # This is a little weird but fits the current definition }, "vc": { "pending_coin_removal_count": 1, @@ -961,6 +969,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": 6, "spendable_balance": 6, "max_send_amount": 6, + "pending_change": -6, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, @@ -1109,12 +1118,14 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "unconfirmed_wallet_balance": -8, "<=#spendable_balance": -8, "<=#max_send_amount": -8, + "pending_change": 1, "pending_coin_removal_count": 2, # For the first time, we're using two coins in an offer }, "new cat": { "unconfirmed_wallet_balance": -9, "<=#spendable_balance": -9, "<=#max_send_amount": -9, + 
"pending_change": 83, "pending_coin_removal_count": 1, }, **( @@ -1138,6 +1149,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": -8, ">#spendable_balance": 0, ">#max_send_amount": 0, + "pending_change": -1, "pending_coin_removal_count": -2, "unspent_coin_count": -1, }, @@ -1145,6 +1157,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": -9, ">#spendable_balance": 0, ">#max_send_amount": 0, + "pending_change": -83, "pending_coin_removal_count": -1, }, **( @@ -1178,6 +1191,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "cat": { "unconfirmed_wallet_balance": 8, "pending_coin_removal_count": 1, + "pending_change": 8, # This is a little weird but fits the current definition }, "vc": { "pending_coin_removal_count": 1, @@ -1193,6 +1207,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": 8, "spendable_balance": 8, "max_send_amount": 8, + "pending_change": -8, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, @@ -1223,6 +1238,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "new cat": { "unconfirmed_wallet_balance": 9, "pending_coin_removal_count": 1, + "pending_change": 9, # This is a little weird but fits the current definition }, "vc": { "pending_coin_removal_count": 1, @@ -1238,6 +1254,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": 9, "spendable_balance": 9, "max_send_amount": 9, + "pending_change": -9, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, @@ -1480,6 +1497,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "unconfirmed_wallet_balance": -15, "<=#spendable_balance": -15, "<=#max_send_amount": -15, + "pending_change": 68, "pending_coin_removal_count": 1, }, **( @@ -1509,6 +1527,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": -15, ">#spendable_balance": 0, ">#max_send_amount": 0, + "pending_change": -68, "pending_coin_removal_count": -1, }, **( @@ -1542,6 +1561,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "new cat": { "unconfirmed_wallet_balance": 15, "pending_coin_removal_count": 1, + "pending_change": 15, # This is a little weird but fits the current definition }, "vc": { "pending_coin_removal_count": 1, @@ -1557,6 +1577,7 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num "confirmed_wallet_balance": 15, "spendable_balance": 15, "max_send_amount": 15, + "pending_change": -15, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index b308da9f2d7d..80206d4cdf9a 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -377,6 +377,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "unconfirmed_wallet_balance": -90, "spendable_balance": -100, "max_send_amount": -100, + "pending_change": 10, "pending_coin_removal_count": 1, }, }, @@ -393,6 +394,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "confirmed_wallet_balance": -90, "spendable_balance": 10, "max_send_amount": 10, + "pending_change": -10, "pending_coin_removal_count": 
-1, }, }, @@ -482,6 +484,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: }, "crcat": { "unconfirmed_wallet_balance": 90, + "pending_change": 90, "pending_coin_removal_count": 1, }, }, @@ -498,6 +501,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "confirmed_wallet_balance": 90, "spendable_balance": 90, "max_send_amount": 90, + "pending_change": -90, "unspent_coin_count": 1, "pending_coin_removal_count": -1, }, @@ -551,6 +555,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "unconfirmed_wallet_balance": -50, "spendable_balance": -90, "max_send_amount": -90, + "pending_change": 40, "pending_coin_removal_count": 1, }, }, @@ -567,6 +572,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "confirmed_wallet_balance": -50, # should go straight to confirmed because we sent to ourselves "spendable_balance": 40, "max_send_amount": 40, + "pending_change": -40, "pending_coin_removal_count": -1, "unspent_coin_count": 1, }, diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py index 2b6a263aeb38..2e8131705899 100644 --- a/chia/wallet/cat_wallet/cat_wallet.py +++ b/chia/wallet/cat_wallet/cat_wallet.py @@ -472,7 +472,7 @@ async def get_pending_change_balance(self) -> uint64: unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id()) addition_amount = 0 for record in unconfirmed_tx: - if not record.is_in_mempool(): + if not record.is_in_mempool() and record.spend_bundle is not None: continue our_spend = False for coin in record.removals: @@ -484,7 +484,12 @@ async def get_pending_change_balance(self) -> uint64: continue for coin in record.additions: - if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()): + hint_dict = { + coin_id: bytes32(memos[0]) + for coin_id, memos in record.memos + if len(memos) > 0 and len(memos[0]) == 32 + } + if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id(), hint_dict=hint_dict): addition_amount += coin.amount return uint64(addition_amount) From cf7b54966ecbc78976a2296d9acaa1954f424146 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 11 Jul 2024 16:44:36 -0400 Subject: [PATCH 59/77] root hash, does `None` mean empty or unspecified? 
(#18244) * mark some cases where a none root hash means unspecified, not empty * propagate, hint for help tracking, etc * cp * enum trick * tidy * tidy * fixup --- chia/cmds/data_funcs.py | 3 ++ chia/data_layer/data_layer.py | 28 ++++++++--- chia/data_layer/data_layer_util.py | 21 +++++++- chia/data_layer/data_store.py | 77 ++++++++++++++++++++++-------- chia/rpc/data_layer_rpc_api.py | 48 ++++++++++++------- 5 files changed, 133 insertions(+), 44 deletions(-) diff --git a/chia/cmds/data_funcs.py b/chia/cmds/data_funcs.py index c3d16e2b21db..9afb586b96f3 100644 --- a/chia/cmds/data_funcs.py +++ b/chia/cmds/data_funcs.py @@ -50,6 +50,7 @@ async def get_value_cmd( rpc_port: Optional[int], store_id: bytes32, key: str, + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics root_hash: Optional[bytes32], fingerprint: Optional[int], ) -> None: @@ -137,6 +138,7 @@ async def submit_all_pending_roots_cmd( async def get_keys_cmd( rpc_port: Optional[int], store_id: bytes32, + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics root_hash: Optional[bytes32], fingerprint: Optional[int], page: Optional[int], @@ -154,6 +156,7 @@ async def get_keys_cmd( async def get_keys_values_cmd( rpc_port: Optional[int], store_id: bytes32, + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics root_hash: Optional[bytes32], fingerprint: Optional[int], page: Optional[int], diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py index 8dcc940099b4..ef2c920edf23 100644 --- a/chia/data_layer/data_layer.py +++ b/chia/data_layer/data_layer.py @@ -51,8 +51,10 @@ Subscription, SyncStatus, TerminalNode, + Unspecified, UnsubscribeData, leaf_hash, + unspecified, ) from chia.data_layer.data_layer_wallet import DataLayerWallet, Mirror, SingletonRecord, verify_offer from chia.data_layer.data_store import DataStore @@ -384,7 +386,7 @@ async def get_key_value_hash( self, store_id: bytes32, key: bytes, - root_hash: Optional[bytes32] = None, + root_hash: Union[bytes32, Unspecified] = unspecified, ) -> bytes32: await self._update_confirmation_status(store_id=store_id) @@ -392,7 +394,9 @@ async def get_key_value_hash( node = await self.data_store.get_node_by_key(store_id=store_id, key=key, root_hash=root_hash) return node.hash - async def get_value(self, store_id: bytes32, key: bytes, root_hash: Optional[bytes32] = None) -> bytes: + async def get_value( + self, store_id: bytes32, key: bytes, root_hash: Union[bytes32, Unspecified] = unspecified + ) -> bytes: await self._update_confirmation_status(store_id=store_id) async with self.data_store.transaction(): @@ -400,7 +404,11 @@ async def get_value(self, store_id: bytes32, key: bytes, root_hash: Optional[byt res = await self.data_store.get_node_by_key(store_id=store_id, key=key, root_hash=root_hash) return res.value - async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32]) -> List[TerminalNode]: + async def get_keys_values( + self, + store_id: bytes32, + root_hash: Union[bytes32, Unspecified], + ) -> List[TerminalNode]: await self._update_confirmation_status(store_id=store_id) res = await self.data_store.get_keys_values(store_id, root_hash) @@ -411,7 +419,7 @@ async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32]) async def get_keys_values_paginated( self, store_id: bytes32, - root_hash: Optional[bytes32], + root_hash: Union[bytes32, Unspecified], page: int, max_page_size: Optional[int] = None, ) -> KeysValuesPaginationData: @@ -422,7 +430,7 
@@ async def get_keys_values_paginated( res = await self.data_store.get_keys_values_paginated(store_id, page, max_page_size, root_hash) return res - async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32]) -> List[bytes]: + async def get_keys(self, store_id: bytes32, root_hash: Union[bytes32, Unspecified]) -> List[bytes]: await self._update_confirmation_status(store_id=store_id) res = await self.data_store.get_keys(store_id, root_hash) @@ -431,7 +439,7 @@ async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32]) -> Lis async def get_keys_paginated( self, store_id: bytes32, - root_hash: Optional[bytes32], + root_hash: Union[bytes32, Unspecified], page: int, max_page_size: Optional[int] = None, ) -> KeysPaginationData: @@ -820,7 +828,13 @@ async def get_kv_diff(self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32) return await self.data_store.get_kv_diff(store_id, hash_1, hash_2) async def get_kv_diff_paginated( - self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32, page: int, max_page_size: Optional[int] = None + self, + store_id: bytes32, + # NOTE: empty is expressed as zeros + hash_1: bytes32, + hash_2: bytes32, + page: int, + max_page_size: Optional[int] = None, ) -> KVDiffPaginationData: if max_page_size is None: max_page_size = 40 * 1024 * 1024 diff --git a/chia/data_layer/data_layer_util.py b/chia/data_layer/data_layer_util.py index 4dfccc39d10c..ba1bd8e19c1c 100644 --- a/chia/data_layer/data_layer_util.py +++ b/chia/data_layer/data_layer_util.py @@ -2,7 +2,7 @@ import dataclasses from dataclasses import dataclass, field -from enum import IntEnum +from enum import Enum, IntEnum from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union # TODO: remove or formalize this @@ -86,7 +86,11 @@ async def _debug_dump(db: DBWrapper2, description: str = "") -> None: print(f" {dict(row)}") -async def _dot_dump(data_store: DataStore, store_id: bytes32, root_hash: bytes32) -> str: +async def _dot_dump( + data_store: DataStore, + store_id: bytes32, + root_hash: bytes32, +) -> str: terminal_nodes = await data_store.get_keys_values(store_id=store_id, root_hash=root_hash) internal_nodes = await data_store.get_internal_nodes(store_id=store_id, root_hash=root_hash) @@ -325,6 +329,19 @@ def other_child_side(self, hash: bytes32) -> Side: raise Exception("provided hash not present") +class Unspecified(Enum): + # not beautiful, improve when a better way is known + # https://github.com/python/typing/issues/236#issuecomment-229515556 + + instance = None + + def __repr__(self) -> str: + return "Unspecified" + + +unspecified = Unspecified.instance + + @dataclass(frozen=True) class Root: store_id: bytes32 diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index a19b66252ac8..a8c68992db18 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -31,11 +31,13 @@ Status, Subscription, TerminalNode, + Unspecified, get_hashes_for_page, internal_hash, key_hash, leaf_hash, row_to_node, + unspecified, ) from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -785,13 +787,20 @@ async def get_keys_values_cursor( {"root_hash": root_hash, "node_type": NodeType.TERMINAL}, ) - async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> List[TerminalNode]: + async def get_keys_values( + self, + store_id: bytes32, + root_hash: Union[bytes32, Unspecified] = unspecified, + ) -> List[TerminalNode]: async with 
self.db_wrapper.reader() as reader: - if root_hash is None: + resolved_root_hash: Optional[bytes32] + if root_hash is unspecified: root = await self.get_tree_root(store_id=store_id) - root_hash = root.node_hash + resolved_root_hash = root.node_hash + else: + resolved_root_hash = root_hash - cursor = await self.get_keys_values_cursor(reader, root_hash) + cursor = await self.get_keys_values_cursor(reader, resolved_root_hash) terminal_nodes: List[TerminalNode] = [] async for row in cursor: if row["depth"] > 62: @@ -814,14 +823,19 @@ async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32] return terminal_nodes async def get_keys_values_compressed( - self, store_id: bytes32, root_hash: Optional[bytes32] = None + self, + store_id: bytes32, + root_hash: Union[bytes32, Unspecified] = unspecified, ) -> KeysValuesCompressed: async with self.db_wrapper.reader() as reader: - if root_hash is None: + resolved_root_hash: Optional[bytes32] + if root_hash is unspecified: root = await self.get_tree_root(store_id=store_id) - root_hash = root.node_hash + resolved_root_hash = root.node_hash + else: + resolved_root_hash = root_hash - cursor = await self.get_keys_values_cursor(reader, root_hash) + cursor = await self.get_keys_values_cursor(reader, resolved_root_hash) keys_values_hashed: Dict[bytes32, bytes32] = {} key_hash_to_length: Dict[bytes32, int] = {} leaf_hash_to_length: Dict[bytes32, int] = {} @@ -835,7 +849,7 @@ async def get_keys_values_compressed( key_hash_to_length[key_hash(node.key)] = len(node.key) leaf_hash_to_length[leaf_hash(node.key, node.value)] = len(node.key) + len(node.value) - return KeysValuesCompressed(keys_values_hashed, key_hash_to_length, leaf_hash_to_length, root_hash) + return KeysValuesCompressed(keys_values_hashed, key_hash_to_length, leaf_hash_to_length, resolved_root_hash) async def get_leaf_hashes_by_hashed_key( self, store_id: bytes32, root_hash: Optional[bytes32] = None @@ -853,7 +867,11 @@ async def get_leaf_hashes_by_hashed_key( return result async def get_keys_paginated( - self, store_id: bytes32, page: int, max_page_size: int, root_hash: Optional[bytes32] = None + self, + store_id: bytes32, + page: int, + max_page_size: int, + root_hash: Union[bytes32, Unspecified] = unspecified, ) -> KeysPaginationData: keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash) pagination_data = get_hashes_for_page(page, keys_values_compressed.key_hash_to_length, max_page_size) @@ -873,7 +891,11 @@ async def get_keys_paginated( ) async def get_keys_values_paginated( - self, store_id: bytes32, page: int, max_page_size: int, root_hash: Optional[bytes32] = None + self, + store_id: bytes32, + page: int, + max_page_size: int, + root_hash: Union[bytes32, Unspecified] = unspecified, ) -> KeysValuesPaginationData: keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash) pagination_data = get_hashes_for_page(page, keys_values_compressed.leaf_hash_to_length, max_page_size) @@ -892,7 +914,13 @@ async def get_keys_values_paginated( ) async def get_kv_diff_paginated( - self, store_id: bytes32, page: int, max_page_size: int, hash1: bytes32, hash2: bytes32 + self, + store_id: bytes32, + page: int, + max_page_size: int, + # NOTE: empty is expressed as zeros + hash1: bytes32, + hash2: bytes32, ) -> KVDiffPaginationData: old_pairs = await self.get_keys_values_compressed(store_id, hash1) if len(old_pairs.keys_values_hashed) == 0 and hash1 != bytes32([0] * 32): @@ -1031,15 +1059,25 @@ async def autoinsert( root=root, ) - async def 
get_keys_values_dict(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> Dict[bytes, bytes]: + async def get_keys_values_dict( + self, + store_id: bytes32, + root_hash: Union[bytes32, Unspecified] = unspecified, + ) -> Dict[bytes, bytes]: pairs = await self.get_keys_values(store_id=store_id, root_hash=root_hash) return {node.key: node.value for node in pairs} - async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> List[bytes]: + async def get_keys( + self, + store_id: bytes32, + root_hash: Union[bytes32, Unspecified] = unspecified, + ) -> List[bytes]: async with self.db_wrapper.reader() as reader: - if root_hash is None: + if root_hash is unspecified: root = await self.get_tree_root(store_id=store_id) - root_hash = root.node_hash + resolved_root_hash = root.node_hash + else: + resolved_root_hash = root_hash cursor = await reader.execute( """ WITH RECURSIVE @@ -1053,7 +1091,7 @@ async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32] = None) ) SELECT key FROM tree_from_root_hash WHERE node_type == :node_type """, - {"root_hash": root_hash, "node_type": NodeType.TERMINAL}, + {"root_hash": resolved_root_hash, "node_type": NodeType.TERMINAL}, ) keys: List[bytes] = [row["key"] async for row in cursor] @@ -1801,9 +1839,9 @@ async def get_node_by_key( self, key: bytes, store_id: bytes32, - root_hash: Optional[bytes32] = None, + root_hash: Union[bytes32, Unspecified] = unspecified, ) -> TerminalNode: - if root_hash is None: + if root_hash is unspecified: return await self.get_node_by_key_latest_generation(key, store_id) nodes = await self.get_keys_values(store_id=store_id, root_hash=root_hash) @@ -2212,6 +2250,7 @@ async def get_subscriptions(self) -> List[Subscription]: async def get_kv_diff( self, store_id: bytes32, + # NOTE: empty is expressed as zeros hash_1: bytes32, hash_2: bytes32, ) -> Set[DiffData]: diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py index 3d559d0d3625..1611c5061c3e 100644 --- a/chia/rpc/data_layer_rpc_api.py +++ b/chia/rpc/data_layer_rpc_api.py @@ -2,7 +2,7 @@ import dataclasses from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast from chia.data_layer.data_layer_errors import OfferIntegrityError from chia.data_layer.data_layer_util import ( @@ -22,8 +22,10 @@ Subscription, TakeOfferRequest, TakeOfferResponse, + Unspecified, VerifyOfferResponse, VerifyProofResponse, + unspecified, ) from chia.data_layer.data_layer_wallet import DataLayerWallet, Mirror, verify_offer from chia.rpc.data_layer_rpc_util import marshal @@ -161,12 +163,16 @@ async def get_owned_stores(self, request: Dict[str, Any]) -> EndpointResult: async def get_value(self, request: Dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) key = hexstr_to_bytes(request["key"]) - root_hash = request.get("root_hash") + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics + root_hash: Optional[str] = request.get("root_hash") + resolved_root_hash: Union[bytes32, Unspecified] if root_hash is not None: - root_hash = bytes32.from_hexstr(root_hash) + resolved_root_hash = bytes32.from_hexstr(root_hash) + else: + resolved_root_hash = unspecified if self.service is None: raise Exception("Data layer not created") - value = await self.service.get_value(store_id=store_id, key=key, root_hash=root_hash) + value = await self.service.get_value(store_id=store_id, key=key, 
root_hash=resolved_root_hash) hex = None if value is not None: hex = value.hex() @@ -174,22 +180,27 @@ async def get_value(self, request: Dict[str, Any]) -> EndpointResult: async def get_keys(self, request: Dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) - root_hash = request.get("root_hash") + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics + root_hash: Optional[str] = request.get("root_hash") page = request.get("page", None) max_page_size = request.get("max_page_size", None) + resolved_root_hash: Union[bytes32, Unspecified] if root_hash is not None: - root_hash = bytes32.from_hexstr(root_hash) + resolved_root_hash = bytes32.from_hexstr(root_hash) + else: + resolved_root_hash = unspecified if self.service is None: raise Exception("Data layer not created") if page is None: - keys = await self.service.get_keys(store_id, root_hash) + keys = await self.service.get_keys(store_id, resolved_root_hash) else: - keys_paginated = await self.service.get_keys_paginated(store_id, root_hash, page, max_page_size) + keys_paginated = await self.service.get_keys_paginated(store_id, resolved_root_hash, page, max_page_size) keys = keys_paginated.keys - if keys == [] and root_hash is not None and root_hash != bytes32([0] * 32): - raise Exception(f"Can't find keys for {root_hash}") + # NOTE: here we do support zeros as the empty root + if keys == [] and resolved_root_hash is not unspecified and resolved_root_hash != bytes32([0] * 32): + raise Exception(f"Can't find keys for {resolved_root_hash}") response: EndpointResult = {"keys": [f"0x{key.hex()}" for key in keys]} @@ -206,25 +217,30 @@ async def get_keys(self, request: Dict[str, Any]) -> EndpointResult: async def get_keys_values(self, request: Dict[str, Any]) -> EndpointResult: store_id = bytes32(hexstr_to_bytes(request["id"])) - root_hash = request.get("root_hash") + # NOTE: being outside the rpc, this retains the none-means-unspecified semantics + root_hash: Optional[str] = request.get("root_hash") page = request.get("page", None) max_page_size = request.get("max_page_size", None) + resolved_root_hash: Union[bytes32, Unspecified] if root_hash is not None: - root_hash = bytes32.from_hexstr(root_hash) + resolved_root_hash = bytes32.from_hexstr(root_hash) + else: + resolved_root_hash = unspecified if self.service is None: raise Exception("Data layer not created") if page is None: - keys_values = await self.service.get_keys_values(store_id, root_hash) + keys_values = await self.service.get_keys_values(store_id, resolved_root_hash) else: keys_values_paginated = await self.service.get_keys_values_paginated( - store_id, root_hash, page, max_page_size + store_id, resolved_root_hash, page, max_page_size ) keys_values = keys_values_paginated.keys_values json_nodes = [recurse_jsonify(dataclasses.asdict(node)) for node in keys_values] - if not json_nodes and root_hash is not None and root_hash != bytes32([0] * 32): - raise Exception(f"Can't find keys and values for {root_hash}") + # NOTE: here we do support zeros as the empty root + if not json_nodes and resolved_root_hash is not unspecified and resolved_root_hash != bytes32([0] * 32): + raise Exception(f"Can't find keys and values for {resolved_root_hash}") response: EndpointResult = {"keys_values": json_nodes} From e785c7bfc613d61d2722089ca972f082b6b21610 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 11 Jul 2024 16:45:13 -0400 Subject: [PATCH 60/77] less `tree_id` (#18130) --- chia/data_layer/data_store.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index a8c68992db18..c9919485ac91 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -1828,9 +1828,9 @@ async def maybe_get_node_from_key_hash( return None - async def maybe_get_node_by_key(self, key: bytes, tree_id: bytes32) -> Optional[TerminalNode]: + async def maybe_get_node_by_key(self, key: bytes, store_id: bytes32) -> Optional[TerminalNode]: try: - node = await self.get_node_by_key_latest_generation(key, tree_id) + node = await self.get_node_by_key_latest_generation(key, store_id) return node except KeyNotFoundError: return None From 5d71778260334200051c47c7a2e7e024be439b6a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jul 2024 14:34:21 -0700 Subject: [PATCH 61/77] build(deps): bump boto3 from 1.34.114 to 1.34.143 (#18293) Bumps [boto3](https://github.com/boto/boto3) from 1.34.114 to 1.34.143. - [Release notes](https://github.com/boto/boto3/releases) - [Commits](https://github.com/boto/boto3/compare/1.34.114...1.34.143) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 16 ++++++++-------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 088618ad719e..d9519d402400 100644 --- a/poetry.lock +++ b/poetry.lock @@ -505,17 +505,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.114" +version = "1.34.143" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.114-py3-none-any.whl", hash = "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280"}, - {file = "boto3-1.34.114.tar.gz", hash = "sha256:eeb11bca9b19d12baf93436fb8a16b8b824f1f7e8b9bcc722607e862c46b1b08"}, + {file = "boto3-1.34.143-py3-none-any.whl", hash = "sha256:0d16832f23e6bd3ae94e35ea8e625529850bfad9baccd426de96ad8f445d8e03"}, + {file = "boto3-1.34.143.tar.gz", hash = "sha256:b590ce80c65149194def43ebf0ea1cf0533945502507837389a8d22e3ecbcf05"}, ] [package.dependencies] -botocore = ">=1.34.114,<1.35.0" +botocore = ">=1.34.143,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -524,13 +524,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.121" +version = "1.34.143" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.121-py3-none-any.whl", hash = "sha256:25b05c7646a9f240cde1c8f839552a43f27e71e15c42600275dea93e219f7dd9"}, - {file = "botocore-1.34.121.tar.gz", hash = "sha256:1a8f94b917c47dfd84a0b531ab607dc53570efb0d073d8686600f2d2be985323"}, + {file = "botocore-1.34.143-py3-none-any.whl", hash = "sha256:094aea179e8aaa1bc957ad49cc27d93b189dd3a1f3075d8b0ca7c445a2a88430"}, + {file = "botocore-1.34.143.tar.gz", hash = "sha256:059f032ec05733a836e04e869c5a15534420102f93116f3bc9a5b759b0651caf"}, ] [package.dependencies] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "be1a0e30d25c49526bcd13c69e8fa4361710836ff2d5a166d1587c2a5dd33dc6" +content-hash = "34b67d87628b084feb1f674e6ea33a6910a2d6ae8d0b7b1842caabaa1ef9bce1" diff --git a/pyproject.toml b/pyproject.toml index 763ea9914591..3581b5cb3ca5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ aiohttp = "3.9.4" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks anyio = "4.3.0" bitstring = "4.1.4" # Binary data management library -boto3 = "1.34.114" # AWS S3 for Data Layer S3 plugin +boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space chia_rs = "0.10.0" From f2765229cd666a0dcbee7e74170bc15ab517121e Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Thu, 11 Jul 2024 22:37:13 -0500 Subject: [PATCH 62/77] Target macos-13-arm64 runners instead of [MacOS, arm64] (#18298) --- .github/workflows/build-macos-installers.yml | 2 +- .github/workflows/check_wheel_availability.yaml | 2 +- .github/workflows/pre-commit.yml | 2 +- .github/workflows/upload-pypi-source.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index f5c3745b83e6..8d9e9f82c804 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -55,7 +55,7 @@ jobs: - runs-on: macos-12 name: intel bladebit-suffix: macos-x86-64.tar.gz - - runs-on: [MacOS, ARM64] + - runs-on: macos-13-arm64 name: m1 bladebit-suffix: macos-arm64.tar.gz diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 7b0af2e0971d..4c1a07cf48ef 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -34,7 +34,7 @@ jobs: matrix: macos runs-on: intel: macos-12 - arm: [macos, arm64] + arm: macos-13-arm64 - name: Windows matrix: windows runs-on: diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 7beff6db7876..dd319c855926 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -31,7 +31,7 @@ jobs: matrix: macos runs-on: intel: macos-12 - arm: [macos, arm64] + arm: macos-13-arm64 - name: Windows matrix: windows runs-on: diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 5275e89dc489..85a04f7e76f9 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -45,7 +45,7 @@ jobs: emoji: 🍎 runs-on: intel: macos-12 - arm: [macos, arm64] + arm: macos-13-arm64 - name: Windows matrix: windows emoji: 🪟 From e32728d4fa214eb6fef87ee5c62f3903f0e5a0f9 Mon Sep 17 00:00:00 2001 From: Starttoaster Date: Fri, 12 Jul 2024 
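
A minimal, self-contained sketch of the root-hash sentinel pattern introduced in PATCH 59 (#18244) above: passing nothing ("unspecified") means use the store's latest root, while an explicit None means the empty tree. The store layout and bytes keys here are stand-ins for illustration, not the real DataStore classes.

    from enum import Enum
    from typing import Dict, List, Optional, Union


    class Unspecified(Enum):
        # single-member enum so the sentinel is a distinct, type-checkable value
        instance = None


    unspecified = Unspecified.instance


    def get_keys(
        latest_root: Optional[bytes],
        roots: Dict[bytes, List[bytes]],
        root_hash: Union[bytes, None, Unspecified] = unspecified,
    ) -> List[bytes]:
        # unspecified -> fall back to the latest root (which may itself be None, i.e. empty)
        resolved = latest_root if root_hash is unspecified else root_hash
        if resolved is None:
            return []
        return roots[resolved]


    roots = {b"r1": [b"key-a", b"key-b"]}
    assert get_keys(b"r1", roots) == [b"key-a", b"key-b"]   # argument omitted -> latest root
    assert get_keys(b"r1", roots, None) == []               # explicit None -> empty tree
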
09:09:44 -0700 Subject: [PATCH 63/77] Switch curl requests to github-glue action (#18300) * Switch curl requests to github-glue action * Prettier situationally prefers double quotes so make the bot happy * Add name lines back to start release job steps * Use github format environment variable in sync test start steps * Remove redundant github-jwt step since that is wrapped in the glue action --- .../workflows/build-linux-installer-deb.yml | 19 ++++++++++++------- .../workflows/build-linux-installer-rpm.yml | 19 ++++++++++++------- .github/workflows/build-macos-installers.yml | 19 ++++++++++++------- .github/workflows/build-windows-installer.yml | 19 ++++++++++++------- .github/workflows/start-release.yml | 18 ++++++++++++------ .github/workflows/start-sync-test.yml | 19 ++++++++++++++----- .github/workflows/trigger-docker-dev.yml | 19 ++++++++++++++----- .github/workflows/trigger-docker-main.yml | 19 ++++++++++++++----- 8 files changed, 102 insertions(+), 49 deletions(-) diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 5c139d319331..8d65b6df9019 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -317,18 +317,23 @@ jobs: build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb \ build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb - - uses: Chia-Network/actions/github/jwt@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET - - name: Mark pre-release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}/success/${{ matrix.os.glue-name }} + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" + glue_path: "success/${{ matrix.os.glue-name }}" - name: Mark release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}/success/${{ matrix.os.glue-name }} + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" + glue_path: "success/${{ matrix.os.glue-name }}" test: name: Test ${{ matrix.distribution.name }} ${{ matrix.mode.name }} ${{ matrix.arch.name }} diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index aef0ad1be609..c1f401115f23 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -310,18 +310,23 @@ jobs: build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm \ build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm - - uses: Chia-Network/actions/github/jwt@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET - - name: Mark pre-release installer complete + uses: 
Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}/success/build-linux-rpm + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" + glue_path: "success/build-linux-rpm" - name: Mark release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}/success/build-linux-rpm + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" + glue_path: "success/build-linux-rpm" test: name: Test ${{ matrix.distribution.name }} ${{ matrix.mode.name }} ${{ matrix.state.name }} diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index 8d9e9f82c804..b18e9f473b1d 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -355,18 +355,23 @@ jobs: $RELEASE_TAG \ build_scripts/final_installer/*.dmg - - uses: Chia-Network/actions/github/jwt@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET - - name: Mark pre-release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}/success/${{ matrix.os.glue-name }} + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" + glue_path: "success/${{ matrix.os.glue-name }}" - name: Mark release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}/success/${{ matrix.os.glue-name }} + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" + glue_path: "success/${{ matrix.os.glue-name }}" test: name: Test ${{ matrix.os.name }} ${{ matrix.arch.name }} diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index da3653d9b269..3d0c9235b749 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -359,18 +359,23 @@ jobs: run: | gh release upload --repo ${{ github.repository }} $RELEASE_TAG "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe - - uses: Chia-Network/actions/github/jwt@main - if: 
steps.check_secrets.outputs.HAS_GLUE_SECRET - - name: Mark pre-release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}/success/build-windows + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" + glue_path: "success/build-windows" - name: Mark release installer complete + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}/success/build-windows + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" + glue_path: "success/build-windows" test: name: Test ${{ matrix.os.name }} diff --git a/.github/workflows/start-release.yml b/.github/workflows/start-release.yml index 3c44f8516012..9d5d739f7e01 100644 --- a/.github/workflows/start-release.yml +++ b/.github/workflows/start-release.yml @@ -19,14 +19,20 @@ jobs: env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - uses: Chia-Network/actions/github/jwt@main - - name: Start pre-release + uses: Chia-Network/actions/github/glue@main if: "github.event.release.prerelease" - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}/start + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" + glue_path: "start" - name: Start release + uses: Chia-Network/actions/github/glue@main if: "!github.event.release.prerelease" - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"chia_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}/start + with: + json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" + glue_path: "start" diff --git a/.github/workflows/start-sync-test.yml b/.github/workflows/start-sync-test.yml index 790063c33999..010f605a1cdd 100644 --- a/.github/workflows/start-sync-test.yml +++ b/.github/workflows/start-sync-test.yml @@ -19,9 +19,18 @@ jobs: env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - uses: Chia-Network/actions/github/jwt@main + - name: Trigger sync test workflow via github-glue + uses: Chia-Network/actions/github/glue@main + with: + json_data: '{"test_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "sync-test/${{ env.RELEASE_TAG }}" + glue_path: "start" - - name: Trigger Workflow - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"test_ref": "${{ env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/sync-test/$RELEASE_TAG/start - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"test_ref": "${{ 
env.RELEASE_TAG }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/sync-test/$RELEASE_TAG/success/deploy + - name: Trigger sync test workflow success via github-glue + uses: Chia-Network/actions/github/glue@main + with: + json_data: '{"test_ref": "${{ env.RELEASE_TAG }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "sync-test/${{ env.RELEASE_TAG }}" + glue_path: "success/deploy" diff --git a/.github/workflows/trigger-docker-dev.yml b/.github/workflows/trigger-docker-dev.yml index 33e78fd1dc3e..dfac2e511780 100644 --- a/.github/workflows/trigger-docker-dev.yml +++ b/.github/workflows/trigger-docker-dev.yml @@ -36,11 +36,20 @@ jobs: env: GLUE_API_URL: "${{ secrets.GLUE_API_URL }}" - - uses: Chia-Network/actions/github/jwt@main + - name: Trigger docker dev workflow via github-glue + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_SECRET + with: + json_data: '{"sha":"${{ github.sha }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "docker-build-dev/${{ github.sha }}" + glue_path: "start" - - name: Trigger docker dev workflow via github-glue + - name: Trigger docker dev success via github-glue + uses: Chia-Network/actions/github/glue@main if: steps.check_secrets.outputs.HAS_SECRET - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"sha":"${{ github.sha }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-dev/${{ github.sha }}/start - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{"sha":"${{ github.sha }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-dev/${{ github.sha }}/success/build-dev + with: + json_data: '{"sha":"${{ github.sha }}"}' + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "docker-build-dev/${{ github.sha }}" + glue_path: "success/build-dev" diff --git a/.github/workflows/trigger-docker-main.yml b/.github/workflows/trigger-docker-main.yml index e6b44207ea8e..7de0f0720a4f 100644 --- a/.github/workflows/trigger-docker-main.yml +++ b/.github/workflows/trigger-docker-main.yml @@ -21,9 +21,18 @@ jobs: name: Trigger building a new `main` tag for the chia-docker image runs-on: ubuntu-latest steps: - - uses: Chia-Network/actions/github/jwt@main - - name: Trigger docker main workflow via github-glue - run: | - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-main/${{ github.sha }}/start - curl -s -XPOST -H "Authorization: Bearer ${{ env.JWT_TOKEN }}" --data '{}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-main/${{ github.sha }}/success/build-main + uses: Chia-Network/actions/github/glue@main + with: + json_data: "{}" + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "docker-build-main/${{ github.sha }}" + glue_path: "start" + + - name: Trigger docker main success via github-glue + uses: Chia-Network/actions/github/glue@main + with: + json_data: "{}" + glue_url: "${{ secrets.GLUE_API_URL }}" + glue_project: "docker-build-main/${{ github.sha }}" + glue_path: "success/build-main" From 2ff9c876b57d0358f47f10e09d23f0ab6d2ad1cc Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Fri, 12 Jul 2024 21:28:41 +0200 Subject: [PATCH 64/77] [CHIA-902] default flags for `Program.run()` and `Program.run_with_cost()` (#18287) make plain Program.run() and Program.run_with_cost() default to enabling all the most recent features, and to disallow unknown opcodes (i.e. strict mode). 
To have full control of which features are enabled, introduce a run2() function --- chia/_tests/clvm/test_program.py | 18 +++++++++++++++ chia/types/blockchain_format/program.py | 29 +++++++++++++++++++++++-- 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/chia/_tests/clvm/test_program.py b/chia/_tests/clvm/test_program.py index ce392d9dc879..e742b4391e3d 100644 --- a/chia/_tests/clvm/test_program.py +++ b/chia/_tests/clvm/test_program.py @@ -1,6 +1,7 @@ from __future__ import annotations import pytest +from chia_rs import ENABLE_FIXED_DIV from clvm.EvalError import EvalError from clvm.operators import KEYWORD_TO_ATOM from clvm_tools.binutils import assemble, disassemble @@ -108,3 +109,20 @@ def test_uncurry_args_garbage(): # there's garbage at the end of the args list plus = Program.to(assemble("(2 (q . 1) (c (q . 1) (q . 1) (q . 0x1337)))")) assert plus.uncurry() == (plus, Program.to(0)) + + +def test_run() -> None: + div = Program.to(assemble("(/ 2 5)")) + ret = div.run([10, 5]) + assert ret.atom == bytes([2]) + + ret = div.run([10, -5]) + assert ret.atom == bytes([0xFE]) + + with pytest.raises(ValueError, match="div operator with negative operands is deprecated"): + cost, ret = div.run_with_flags(100000, 0, [10, -5]) + + cost, ret = div.run_with_flags(100000, ENABLE_FIXED_DIV, [10, -5]) + assert cost == 1107 + print(ret) + assert ret.atom == bytes([0xFE]) diff --git a/chia/types/blockchain_format/program.py b/chia/types/blockchain_format/program.py index 864936a16fc1..9c0f12e7beef 100644 --- a/chia/types/blockchain_format/program.py +++ b/chia/types/blockchain_format/program.py @@ -3,7 +3,18 @@ import io from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Set, Tuple, Type, TypeVar -from chia_rs import ALLOW_BACKREFS, run_chia_program, tree_hash +from chia_rs import ( + AGG_SIG_ARGS, + ALLOW_BACKREFS, + DISALLOW_INFINITY_G1, + ENABLE_BLS_OPS_OUTSIDE_GUARD, + ENABLE_FIXED_DIV, + ENABLE_MESSAGE_CONDITIONS, + ENABLE_SOFTFORK_CONDITION, + MEMPOOL_MODE, + run_chia_program, + tree_hash, +) from clvm.casts import int_from_bytes from clvm.CLVMObject import CLVMStorage from clvm.EvalError import EvalError @@ -129,12 +140,26 @@ def _run(self, max_cost: int, flags: int, args: Any) -> Tuple[int, Program]: return cost, Program.to(r) def run_with_cost(self, max_cost: int, args: Any) -> Tuple[int, Program]: - return self._run(max_cost, 0, args) + # when running puzzles in the wallet, default to enabling all soft-forks + # as well as enabling mempool-mode (i.e. strict mode) + default_flags = ( + ENABLE_SOFTFORK_CONDITION + | ENABLE_BLS_OPS_OUTSIDE_GUARD + | ENABLE_FIXED_DIV + | AGG_SIG_ARGS + | ENABLE_MESSAGE_CONDITIONS + | DISALLOW_INFINITY_G1 + | MEMPOOL_MODE + ) + return self._run(max_cost, default_flags, args) def run(self, args: Any) -> Program: cost, r = self.run_with_cost(INFINITE_COST, args) return r + def run_with_flags(self, max_cost: int, flags: int, args: Any) -> Tuple[int, Program]: + return self._run(max_cost, flags, args) + # Replicates the curry function from clvm_tools, taking advantage of *args # being a list. We iterate through args in reverse building the code to # create a clvm list. From 3b4d71e8f2a4ae9cb408f4cf0ba89ae4a86bf8a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Jul 2024 12:28:56 -0700 Subject: [PATCH 65/77] Bump pyinstaller from 6.7.0 to 6.9.0 (#18283) Bumps [pyinstaller](https://github.com/pyinstaller/pyinstaller) from 6.7.0 to 6.9.0. 
- [Release notes](https://github.com/pyinstaller/pyinstaller/releases) - [Changelog](https://github.com/pyinstaller/pyinstaller/blob/develop/doc/CHANGES.rst) - [Commits](https://github.com/pyinstaller/pyinstaller/compare/v6.7.0...v6.9.0) --- updated-dependencies: - dependency-name: pyinstaller dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 36 ++++++++++++++++++------------------ pyproject.toml | 2 +- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index d9519d402400..eab0e94fd726 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2442,23 +2442,23 @@ plugins = ["importlib-metadata"] [[package]] name = "pyinstaller" -version = "6.7.0" +version = "6.9.0" description = "PyInstaller bundles a Python application and all its dependencies into a single package." optional = true python-versions = "<3.13,>=3.8" files = [ - {file = "pyinstaller-6.7.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:6decedba07031d1318528cb76d8400ae1572f7b08197f771ceca9e454e0060bf"}, - {file = "pyinstaller-6.7.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0756b3d4d3283ae2a5bda56abe479b80801ecafecdb3a96cd928542c2c75d016"}, - {file = "pyinstaller-6.7.0-py3-none-manylinux2014_i686.whl", hash = "sha256:df1b66500a7def997790bdadc23c142a2f96585ccd440beac63b72a4f3e41684"}, - {file = "pyinstaller-6.7.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:fa552214a8cbb5bfe4621c46a73c3cce12f299a520aa5ac397dc18718278f03a"}, - {file = "pyinstaller-6.7.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:5263ecbfd34a2297f0e5d41ecfcf7a6fb1ebbf60dbe0dc7c2d64f4a55871a99d"}, - {file = "pyinstaller-6.7.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4ff8ce04f1e5ab3a65d4a1ee6036cba648d0cdae6a7a33c6f0ca4ace46cdd43c"}, - {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:95efc2de7722213f376c5bac9620f390899f9a3c9eed70bd65adf29e2a085d5f"}, - {file = "pyinstaller-6.7.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1b6dd6a50a7315214d345875cd08f8aa71025e7ba6bfa0f95c09285585e8d372"}, - {file = "pyinstaller-6.7.0-py3-none-win32.whl", hash = "sha256:73b94ce02b208c34eaabd032dd1522a3c03c0b3118a31bf7e4eafe7a9f4af2da"}, - {file = "pyinstaller-6.7.0-py3-none-win_amd64.whl", hash = "sha256:a3f85935b40f89e717f1e67377d3bfc953060e5795828ecf5357e2c1f7aa52bf"}, - {file = "pyinstaller-6.7.0-py3-none-win_arm64.whl", hash = "sha256:53038419ca09eea59de02dfb52453dd327983b0957821be610fb04cfd84676d0"}, - {file = "pyinstaller-6.7.0.tar.gz", hash = "sha256:8f09179c5f3d1b4b8453ac61adfe394dd416f9fc33abd7553f77d4897bc3a582"}, + {file = "pyinstaller-6.9.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:5ced2e83acf222b936ea94abc5a5cc96588705654b39138af8fb321d9cf2b954"}, + {file = "pyinstaller-6.9.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:f18a3d551834ef8fb7830d48d4cc1527004d0e6b51ded7181e78374ad6111846"}, + {file = "pyinstaller-6.9.0-py3-none-manylinux2014_i686.whl", hash = "sha256:f2fc568de3d6d2a176716a3fc9f20da06d351e8bea5ddd10ecb5659fce3a05b0"}, + {file = "pyinstaller-6.9.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:a0f378f64ad0655d11ade9fde7877e7573fd3d5066231608ce7dfa9040faecdd"}, + {file = "pyinstaller-6.9.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:7bf0c13c5a8560c89540746ae742f4f4b82290e95a6b478374d9f34959fe25d6"}, + {file = "pyinstaller-6.9.0-py3-none-manylinux2014_x86_64.whl", hash 
= "sha256:da994aba14c5686db88796684de265a8665733b4df09b939f7ebdf097d18df72"}, + {file = "pyinstaller-6.9.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:4e3e50743c091a06e6d01c59bdd6d03967b453ee5384a9e790759be4129db4a4"}, + {file = "pyinstaller-6.9.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:b041be2fe78da47a269604d62c940d68c62f9a3913bdf64af4123f7689d47099"}, + {file = "pyinstaller-6.9.0-py3-none-win32.whl", hash = "sha256:2bf4de17a1c63c0b797b38e13bfb4d03b5ee7c0a68e28b915a7eaacf6b76087f"}, + {file = "pyinstaller-6.9.0-py3-none-win_amd64.whl", hash = "sha256:43709c70b1da8441a730327a8ed362bfcfdc3d42c1bf89f3e2b0a163cc4e7d33"}, + {file = "pyinstaller-6.9.0-py3-none-win_arm64.whl", hash = "sha256:f15c1ef11ed5ceb32447dfbdab687017d6adbef7fc32aa359d584369bfe56eda"}, + {file = "pyinstaller-6.9.0.tar.gz", hash = "sha256:f4a75c552facc2e2a370f1e422b971b5e5cdb4058ff38cea0235aa21fc0b378f"}, ] [package.dependencies] @@ -2467,7 +2467,7 @@ importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} packaging = ">=22.0" pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""} -pyinstaller-hooks-contrib = ">=2024.6" +pyinstaller-hooks-contrib = ">=2024.7" pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""} setuptools = ">=42.0.0" @@ -2477,13 +2477,13 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] [[package]] name = "pyinstaller-hooks-contrib" -version = "2024.6" +version = "2024.7" description = "Community maintained hooks for PyInstaller" optional = true python-versions = ">=3.7" files = [ - {file = "pyinstaller_hooks_contrib-2024.6-py2.py3-none-any.whl", hash = "sha256:6cc88dad75261d9e1a7e0c6385139f35dcdbb16640c911a27f6078fe924a38cf"}, - {file = "pyinstaller_hooks_contrib-2024.6.tar.gz", hash = "sha256:3c188b3a79f5cd46d96520df3934642556a1b6ce8988ec5bbce820ada424bc2b"}, + {file = "pyinstaller_hooks_contrib-2024.7-py2.py3-none-any.whl", hash = "sha256:8bf0775771fbaf96bcd2f4dfd6f7ae6c1dd1b1efe254c7e50477b3c08e7841d8"}, + {file = "pyinstaller_hooks_contrib-2024.7.tar.gz", hash = "sha256:fd5f37dcf99bece184e40642af88be16a9b89613ecb958a8bd1136634fc9fac5"}, ] [package.dependencies] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "34b67d87628b084feb1f674e6ea33a6910a2d6ae8d0b7b1842caabaa1ef9bce1" +content-hash = "d06ac556d0f2c8febd90fe4a45585276e28ded80b7c297c13259a462862a14dc" diff --git a/pyproject.toml b/pyproject.toml index 3581b5cb3ca5..8ab52b824e1a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,7 @@ isort = { version = "5.13.2", optional = true } mypy = { version = "1.10.0", optional = true } pre-commit = [ { version = "3.5.0", python = "<3.9", optional = true }, { version = "3.7.1", python = ">=3.9", optional = true } ] py3createtorrent = { version = "1.1.0", optional = true } -pyinstaller = { version = "6.7.0", optional = true } +pyinstaller = { version = "6.9.0", optional = true } pylint = { version = "3.2.2", optional = true } pytest = { version = "8.1.1", optional = true } pytest-cov = { version = "5.0.0", optional = true } From 064d34f1844aec94650b2ac1d698c7704f501bf9 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Fri, 12 Jul 2024 12:29:09 -0700 Subject: [PATCH 66/77] [CHIA-738] Add a better clawback auto claim test and fix related issue (#18141) * Add the concept of 'action scopes' * Add `WalletActionScope` * Fix CATWallet pending_change 
calculation * Add the concept of 'action scopes' * pylint and test coverage * add try/finally * add try/except * Undo giving a variable a name * Fix CRCAT test * Fix trade tests * Fix cat test * Add auto claim test coverage --- chia/_tests/wallet/test_wallet.py | 115 +++++++++++------------------- chia/wallet/wallet_node.py | 12 ++-- 2 files changed, 48 insertions(+), 79 deletions(-) diff --git a/chia/_tests/wallet/test_wallet.py b/chia/_tests/wallet/test_wallet.py index 9ce2ba9cbd03..82290777c82d 100644 --- a/chia/_tests/wallet/test_wallet.py +++ b/chia/_tests/wallet/test_wallet.py @@ -21,7 +21,7 @@ from chia.types.spend_bundle import estimate_fees from chia.util.bech32m import encode_puzzle_hash from chia.util.errors import Err -from chia.util.ints import uint32, uint64 +from chia.util.ints import uint16, uint32, uint64 from chia.wallet.conditions import ConditionValidTimes from chia.wallet.derive_keys import master_sk_to_wallet_sk from chia.wallet.payment import Payment @@ -163,12 +163,15 @@ async def test_wallet_reuse_address(self, wallet_environments: WalletTestFramewo @pytest.mark.parametrize( "wallet_environments", - [{"num_environments": 2, "blocks_needed": [1, 1], "reuse_puzhash": True}], + [{"num_environments": 2, "blocks_needed": [2, 1], "reuse_puzhash": True}], indirect=True, ) + @pytest.mark.parametrize("number_of_coins", [1, 3]) @pytest.mark.limit_consensus_modes(reason="irrelevant") @pytest.mark.anyio - async def test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestFramework) -> None: + async def test_wallet_clawback_claim_auto( + self, wallet_environments: WalletTestFramework, number_of_coins: int + ) -> None: env = wallet_environments.environments[0] env_1 = wallet_environments.environments[1] wallet = env.xch_wallet @@ -180,34 +183,35 @@ async def test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestF normal_puzhash = await wallet_1.get_new_puzzlehash() # Transfer to normal wallet - [tx1] = await wallet.generate_signed_transaction( - uint64(tx_amount), - normal_puzhash, - DEFAULT_TX_CONFIG, - uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 10}], - ) - [tx1] = await wallet.wallet_state_manager.add_pending_transactions([tx1]) + for _ in range(0, number_of_coins): + [tx1] = await wallet.generate_signed_transaction( + uint64(tx_amount), + normal_puzhash, + DEFAULT_TX_CONFIG, + uint64(0), + puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 10}], + ) + [tx1] = await wallet.wallet_state_manager.add_pending_transactions([tx1]) await wallet_environments.process_pending_states( [ WalletStateTransition( pre_block_balance_updates={ 1: { - "unconfirmed_wallet_balance": -1 * tx_amount, - "<=#spendable_balance": -1 * tx_amount, - "<=#max_send_amount": -1 * tx_amount, + "unconfirmed_wallet_balance": -1 * tx_amount * number_of_coins, + "<=#spendable_balance": -1 * tx_amount * number_of_coins, + "<=#max_send_amount": -1 * tx_amount * number_of_coins, ">=#pending_change": 1, # any amount increase - "pending_coin_removal_count": 1, + "pending_coin_removal_count": number_of_coins, } }, post_block_balance_updates={ 1: { - "confirmed_wallet_balance": -1 * tx_amount, + "confirmed_wallet_balance": -1 * tx_amount * number_of_coins, ">=#spendable_balance": 1, # any amount increase ">=#max_send_amount": 1, # any amount increase "<=#pending_change": -1, # any amount decrease - "pending_coin_removal_count": -1, + "pending_coin_removal_count": -number_of_coins, } }, ), @@ -218,57 +222,19 @@ async def 
test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestF ] ) - await time_out_assert(20, wsm.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) - await time_out_assert(20, wsm_1.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) - - [tx2] = await wallet.generate_signed_transaction( - uint64(tx_amount), - normal_puzhash, - DEFAULT_TX_CONFIG, - uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 10}], - ) - [tx2] = await wallet.wallet_state_manager.add_pending_transactions([tx2]) - - await wallet_environments.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates={ - 1: { - "unconfirmed_wallet_balance": -1 * tx_amount, - "<=#spendable_balance": -1 * tx_amount, - "<=#max_send_amount": -1 * tx_amount, - ">=#pending_change": 1, # any amount increase - "pending_coin_removal_count": 1, - } - }, - post_block_balance_updates={ - 1: { - "confirmed_wallet_balance": -1 * tx_amount, - ">=#spendable_balance": 1, # any amount increase - ">=#max_send_amount": 1, # any amount increase - "<=#pending_change": -1, # any amount decrease - "pending_coin_removal_count": -1, - } - }, - ), - WalletStateTransition( - pre_block_balance_updates={}, - post_block_balance_updates={}, - ), - ] + await time_out_assert(20, wsm.coin_store.count_small_unspent, number_of_coins, tx_amount * 2, CoinType.CLAWBACK) + await time_out_assert( + 20, wsm_1.coin_store.count_small_unspent, number_of_coins, tx_amount * 2, CoinType.CLAWBACK ) - await time_out_assert(20, wsm.coin_store.count_small_unspent, 2, 1000, CoinType.CLAWBACK) - await time_out_assert(20, wsm_1.coin_store.count_small_unspent, 2, 1000, CoinType.CLAWBACK) - [tx3] = await wallet.generate_signed_transaction( + [tx_bad] = await wallet.generate_signed_transaction( uint64(tx_amount), normal_puzhash, DEFAULT_TX_CONFIG, uint64(0), puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 10}], ) - [tx3] = await wallet.wallet_state_manager.add_pending_transactions([tx3]) + [tx_bad] = await wallet.wallet_state_manager.add_pending_transactions([tx_bad]) await wallet_environments.process_pending_states( [ @@ -299,13 +265,13 @@ async def test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestF ] ) - # Change 3rd coin to test missing metadata case - clawback_coin_id = tx3.additions[0].name() + # Change one coin to test missing metadata case + clawback_coin_id = tx_bad.additions[0].name() coin_record = await wsm_1.coin_store.get_coin_record(clawback_coin_id) assert coin_record is not None await wsm_1.coin_store.add_coin_record(dataclasses.replace(coin_record, metadata=None)) # Claim merkle coin - env_1.node.set_auto_claim(AutoClaimSettings(enabled=True)) + env_1.node.set_auto_claim(AutoClaimSettings(enabled=True, batch_size=uint16(2))) # Trigger auto claim await wallet_environments.process_pending_states( [ @@ -315,9 +281,10 @@ async def test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestF # After auto claim is set, the next block will trigger submission of clawback claims post_block_balance_updates={ 1: { - "unconfirmed_wallet_balance": 1000, - "pending_change": 1000, # This is a little weird but I think intentional and correct - "pending_coin_removal_count": 2, + "unconfirmed_wallet_balance": tx_amount * number_of_coins, + "pending_change": tx_amount + * number_of_coins, # This is a little weird but I think intentional and correct + "pending_coin_removal_count": number_of_coins, } }, ), @@ -330,19 +297,19 @@ async def 
test_wallet_clawback_claim_auto(self, wallet_environments: WalletTestF pre_block_balance_updates={}, post_block_balance_updates={ 1: { - "confirmed_wallet_balance": 1000, - "spendable_balance": 1000, - "max_send_amount": 1000, - "unspent_coin_count": 2, - "pending_change": -1000, - "pending_coin_removal_count": -2, + "confirmed_wallet_balance": tx_amount * number_of_coins, + "spendable_balance": tx_amount * number_of_coins, + "max_send_amount": tx_amount * number_of_coins, + "unspent_coin_count": number_of_coins, + "pending_change": -tx_amount * number_of_coins, + "pending_coin_removal_count": -1 * number_of_coins, } }, ), ] ) - await time_out_assert(20, wsm.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) - await time_out_assert(20, wsm_1.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) + await time_out_assert(20, wsm.coin_store.count_small_unspent, 1, tx_amount * 2, CoinType.CLAWBACK) + await time_out_assert(20, wsm_1.coin_store.count_small_unspent, 1, tx_amount * 2, CoinType.CLAWBACK) @pytest.mark.parametrize( "wallet_environments", diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py index fa909a5d2ce2..5380d8a27283 100644 --- a/chia/wallet/wallet_node.py +++ b/chia/wallet/wallet_node.py @@ -649,9 +649,6 @@ async def _process_new_subscriptions(self) -> None: peer = item.data[1] assert peer is not None await self.new_peak_wallet(new_peak, peer) - # Check if any coin needs auto spending - if self.config.get("auto_claim", {}).get("enabled", False): - await self.wallet_state_manager.auto_claim_coins() else: self.log.debug("Pulled from queue: UNKNOWN %s", item.item_type) assert False @@ -1161,12 +1158,17 @@ async def new_peak_wallet(self, new_peak: NewPeakWallet, peer: WSChiaConnection) if not await self.new_peak_from_untrusted(new_peak_hb, peer): return - if peer.peer_node_id in self.synced_peers: - await self.wallet_state_manager.blockchain.set_finished_sync_up_to(new_peak.height) # todo why do we call this if there was an exception / the sync is not finished async with self.wallet_state_manager.lock: await self.wallet_state_manager.new_peak(new_peak.height) + # Check if any coin needs auto spending + if self.config.get("auto_claim", {}).get("enabled", False): + await self.wallet_state_manager.auto_claim_coins() + + if peer.peer_node_id in self.synced_peers: + await self.wallet_state_manager.blockchain.set_finished_sync_up_to(new_peak.height) + async def new_peak_from_trusted( self, new_peak_hb: HeaderBlock, latest_timestamp: uint64, peer: WSChiaConnection ) -> None: From e0840affbc3692e858dd4d6d4d8adb9111c9b8ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 14 Jul 2024 21:36:18 -0700 Subject: [PATCH 67/77] build(deps): bump filelock from 3.14.0 to 3.15.4 (#18252) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.14.0 to 3.15.4. - [Release notes](https://github.com/tox-dev/py-filelock/releases) - [Changelog](https://github.com/tox-dev/filelock/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/py-filelock/compare/3.14.0...3.15.4) --- updated-dependencies: - dependency-name: filelock dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index eab0e94fd726..cb4df1725342 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1300,18 +1300,18 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "d06ac556d0f2c8febd90fe4a45585276e28ded80b7c297c13259a462862a14dc" +content-hash = "812c5cdf6c64d6fb0ef4d9b7baa0073daccfbbee21a2fa39d66ab7437a6f2d6e" diff --git a/pyproject.toml b/pyproject.toml index 8ab52b824e1a..1d4d08952736 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ concurrent_log_handler = "0.9.25" # Concurrently log and rotate logs cryptography = "42.0.5" # Python cryptography library for TLS - keyring conflict dnslib = "0.9.24" # dns lib dnspython = "2.6.1" # Query DNS seeds -filelock = "3.14.0" # For reading and writing config multiprocess and multithread safely (non-reentrant locks) +filelock = "3.15.4" # For reading and writing config multiprocess and multithread safely (non-reentrant locks) keyring = "25.1.0" # Store keys in MacOS Keychain, Windows Credential Locker packaging = "24.0" pip = "24.0" From 1a7d72271402697be5592e6676c9448839b9957b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 10:55:00 -0700 Subject: [PATCH 68/77] build(deps): bump aiohttp from 3.9.4 to 3.9.5 (#18203) Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.9.4 to 3.9.5. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.9.4...v3.9.5) --- updated-dependencies: - dependency-name: aiohttp dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 156 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 79 insertions(+), 79 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb4df1725342..2d7b9ebaf413 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.9.4" +version = "3.9.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, - {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, - {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, - {file = 
"aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, - {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, - {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, - {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, - {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, - {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, - {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, - 
{file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, - {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, - {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, - {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = 
"aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = 
"aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + 
{file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, ] [package.dependencies] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "812c5cdf6c64d6fb0ef4d9b7baa0073daccfbbee21a2fa39d66ab7437a6f2d6e" +content-hash = "047b1073091dde271fd618d0464255a087f323c6d4593d4c2d5d87149949eac6" diff --git a/pyproject.toml b/pyproject.toml index 1d4d08952736..fd73373709c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ priority = "supplemental" [tool.poetry.dependencies] python = ">=3.8.10, <3.13" aiofiles = "23.2.1" # Async IO for files -aiohttp = "3.9.4" # HTTP server for full node rpc +aiohttp = "3.9.5" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks anyio = "4.3.0" bitstring = "4.1.4" # Binary data management library From dfa0557ca29744dea6cc9286c4b886872b81ee70 Mon Sep 17 00:00:00 2001 From: Kyle 
Altendorf Date: Mon, 15 Jul 2024 15:36:34 -0400 Subject: [PATCH 69/77] attempt to "commonize" prettier config (#18297) * attempt to "commonize" prettier config * apply to md * manual touchup --- .github/PULL_REQUEST_TEMPLATE.md | 11 +- .pre-commit-config.yaml | 2 +- .prettierrc.yaml | 7 + BUILD_TIMELORD.md | 4 +- CHANGELOG.md | 715 ++++++++++++++++--------------- CODE_OF_CONDUCT.md | 26 +- LEGACY-SUPPORT-POLICY.md | 2 +- PRETTY_GOOD_PRACTICES.md | 11 +- README.md | 93 ++-- 9 files changed, 453 insertions(+), 418 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index e19adddbab02..912fc0cc64b9 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -3,22 +3,17 @@ - In order to be merged, you must add the most appropriate category Label (Added, Changed, Fixed) to your PR --> -### Purpose: - +### Purpose: -### Current Behavior: - +### Current Behavior: ### New Behavior: - - -### Testing Notes: - +### Testing Notes: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58b2d57bf898..0a89e633313a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: rev: v3.1.0 hooks: - id: prettier - types_or: [ini, json, toml, yaml] + types_or: [ini, json, toml, yaml, markdown] - repo: https://github.com/scop/pre-commit-shfmt rev: v3.8.0-1 hooks: diff --git a/.prettierrc.yaml b/.prettierrc.yaml index c4ac3fcf5f8d..27a0fcfbbfa3 100644 --- a/.prettierrc.yaml +++ b/.prettierrc.yaml @@ -5,3 +5,10 @@ overrides: singleQuote: false experimentalTernaries: true useTabs: false + - files: ["*.md"] + options: + singleQuote: false + - files: ["*.js", "*.jsx", "*.ts", "*.tsx", "*.cjs", "*.mjs"] + options: + printWidth: 120 + singleQuote: true diff --git a/BUILD_TIMELORD.md b/BUILD_TIMELORD.md index 51ba6064a90d..c2fb431177d2 100644 --- a/BUILD_TIMELORD.md +++ b/BUILD_TIMELORD.md @@ -24,8 +24,8 @@ Timelord uses to run the VDF and prove the Proof of Time is `vdf_client` and `vdf_bench` is a utility to get a sense of a given CPU's iterations per second. - To build vdf_client set the environment variable BUILD_VDF_CLIENT to "Y". -`export BUILD_VDF_CLIENT=Y`. + `export BUILD_VDF_CLIENT=Y`. - Similarly, to build vdf_bench set the environment variable BUILD_VDF_BENCH -to "Y". `export BUILD_VDF_BENCH=Y`. + to "Y". `export BUILD_VDF_BENCH=Y`. Building and running Timelords in Windows x86-64 is not yet supported. diff --git a/CHANGELOG.md b/CHANGELOG.md index 77a7e518b9d8..d54cc06f0b05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,333 +18,357 @@ macOS 12 (Monterey) is deprecated. This release (2.4.2) will be the last release ## 2.4.1 Chia blockchain 2024-06-25 ## What's Changed + ### Fixed -* Fixed light wallet (wallet only) syncing issues introduced in 2.4.0 + +- Fixed light wallet (wallet only) syncing issues introduced in 2.4.0 ### Known Issues -* A breaking backwards compatibility issue was introduced in 2.4.0 in the daemon RPC call `add_private_key`. We expect to resolve this in a future release. -* You cannot import or use a 12-word mnemonic key with 2.4.0 or 2.4.1. To import and use a 12-word mnemonic key we recommend you use 2.3.1. This will be resolved in a future release + +- A breaking backwards compatibility issue was introduced in 2.4.0 in the daemon RPC call `add_private_key`. We expect to resolve this in a future release. +- You cannot import or use a 12-word mnemonic key with 2.4.0 or 2.4.1. To import and use a 12-word mnemonic key we recommend you use 2.3.1. 
This will be resolved in a future release ### Deprecated + macOS 11 (Big Sur) is deprecated. This release (2.4.1) will be the last release to support macOS 11 ## 2.4.0 Chia blockchain 2024-06-20 ## What's Changed + ### Added -* Soft fork 5: disallow infinity G1 points as public keys in AGG_SIG_* conditions -* DL: Added support for updating multiple datastores in a single batch update -* Add unfinished block to state change event (thanks @felixbrucker) -* CHIP-0026 Mempool Updates -* Preliminary support for observer mode. Ability to add public keys via CLI - -### Changed -* Remove `tx_records` from `dl_update_multiple` RPC (breaking change) -* DL: optimizations for autoinsert and upsert -* Increase farmer fill rate to 70% -* Use Rust types for `RecentChainData`, `ProofBlockHeader` and `WeightProof` -* Use Rust version of `MerkleSet` -* Remove unused files -* Make a couple of DAOWallet methods return lists of transaction records -* Simplify `MerkleSet` by making it immutable -* Add ability to profile the farmer process -* Remove unused current_inner from PoolState -* Optimize `launcher_id_to_p2_puzzle_hash()` -* Add genesis challenge to `get_network_info` RPC -* Puzzle hash optimizations -* Optimize key derivation in the wallet -* Add optional trusted CIDR list -* Make `BLSCache` a proper class -* Split capabilities for each service -* Use kv compressed in DL batch update -* Updated gui to `electron 30.0.9` -* Bump `chia_rs` to `0.9.0` and update G1Element handling -* Bump `boto3` to `1.34.114` -* Bump `chiabip158` to `1.5.1` -* Bump `clvm` to `0.9.10` -* Bump `aiohttp` to `3.9.4` -* Bump `filelock` to `3.14.0` -* Bump `importlib-resources` to `6.4.0` -* Bump `keyring` to `25.1.0` -* Bump `dnspython` to `2.6.1` -* Bump `typing-extensions` to `4.11.0`, -* Bump `packaging` to `24.0` -* Bump `hsms` to `0.3.1`, - -### Fixed -* Add bytes type to `DerivationRecord.pubkey` -* Do not return unexpected coins from `get_coin_state` -* Fix memo plotid -* Filter out duplicate coins returned by `RequestPuzzleState` -* fix confusion between prompt and don't prompt in the plotnft CLI -* drop deprecated `authentication_public_key` from pool config -* Fixed some typos (thanks @wersfeds) -* Make sure to use no more than 61 cpus on windows (fixes #17967) -* Handle reorgs in data layer wallet -* Modify `VerifiedCredential.launch` to handle multiple source coins -* Add tx_config and extra_conditions to DID creation endpoint -* DL: Return exception and error from `get_kv_diff` when either of the hashes has no data -* Link trade cancellations with announcements -* Add coin id index to coin state batching -* Remove homebrew rpaths from `_ssl.cpython.so` on macOS during build (fixes #18099) -* Aligned `lerna` and `nx` versions -* Set permissions in DEB `postinst.sh` for chrome-sandbox (fixes #17956) + +- Soft fork 5: disallow infinity G1 points as public keys in `AGG_SIG_*` conditions +- DL: Added support for updating multiple datastores in a single batch update +- Add unfinished block to state change event (thanks @felixbrucker) +- CHIP-0026 Mempool Updates +- Preliminary support for observer mode. 
Ability to add public keys via CLI + +### Changed + +- Remove `tx_records` from `dl_update_multiple` RPC (breaking change) +- DL: optimizations for autoinsert and upsert +- Increase farmer fill rate to 70% +- Use Rust types for `RecentChainData`, `ProofBlockHeader` and `WeightProof` +- Use Rust version of `MerkleSet` +- Remove unused files +- Make a couple of DAOWallet methods return lists of transaction records +- Simplify `MerkleSet` by making it immutable +- Add ability to profile the farmer process +- Remove unused current_inner from PoolState +- Optimize `launcher_id_to_p2_puzzle_hash()` +- Add genesis challenge to `get_network_info` RPC +- Puzzle hash optimizations +- Optimize key derivation in the wallet +- Add optional trusted CIDR list +- Make `BLSCache` a proper class +- Split capabilities for each service +- Use kv compressed in DL batch update +- Updated gui to `electron 30.0.9` +- Bump `chia_rs` to `0.9.0` and update G1Element handling +- Bump `boto3` to `1.34.114` +- Bump `chiabip158` to `1.5.1` +- Bump `clvm` to `0.9.10` +- Bump `aiohttp` to `3.9.4` +- Bump `filelock` to `3.14.0` +- Bump `importlib-resources` to `6.4.0` +- Bump `keyring` to `25.1.0` +- Bump `dnspython` to `2.6.1` +- Bump `typing-extensions` to `4.11.0`, +- Bump `packaging` to `24.0` +- Bump `hsms` to `0.3.1`, + +### Fixed + +- Add bytes type to `DerivationRecord.pubkey` +- Do not return unexpected coins from `get_coin_state` +- Fix memo plotid +- Filter out duplicate coins returned by `RequestPuzzleState` +- fix confusion between prompt and don't prompt in the plotnft CLI +- drop deprecated `authentication_public_key` from pool config +- Fixed some typos (thanks @wersfeds) +- Make sure to use no more than 61 cpus on windows (fixes #17967) +- Handle reorgs in data layer wallet +- Modify `VerifiedCredential.launch` to handle multiple source coins +- Add tx_config and extra_conditions to DID creation endpoint +- DL: Return exception and error from `get_kv_diff` when either of the hashes has no data +- Link trade cancellations with announcements +- Add coin id index to coin state batching +- Remove homebrew rpaths from `_ssl.cpython.so` on macOS during build (fixes #18099) +- Aligned `lerna` and `nx` versions +- Set permissions in DEB `postinst.sh` for chrome-sandbox (fixes #17956) ### Deprecated + macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release to support macOS 11 ## 2.3.1 Chia blockchain 2024-04-28 ### Added -* Added `warp.green` CATs (`wUSDC.b`, `wmilliETH.b`, `wUSDC`, `wmilliETH`, `wUSDT`) to the known CAT list + +- Added `warp.green` CATs (`wUSDC.b`, `wmilliETH.b`, `wUSDC`, `wmilliETH`, `wUSDT`) to the known CAT list ## 2.3.0 Chia blockchain 2024-05-01 ### Fixed -* Fixed `Install.ps1` for PowerShell 7.4 -* Fixed readability of `Could not find parent coin` error log by printing hex and not bytes -* Fixed some shutdown log spam by ensuring signal objects for signal handlers (fixes #17578) -* Fixed negative plot sync durations not crashing the harvester (fixes #15027) (thanks @felixbrucker) -* Fixed log spam by only logging warnings about protocol mismatches for farmer and harvester -* Fixed log spam by logging rollbacks only if heights are actually deleted -* Fixed DID update metadata issue (fixes #17412) -* Fixed error codes and add more test coverage for message conditions -* Fixed non-development source install -* Fixed reorg from 0 -* Fixed (again) Datalayer download banning -* Improved timelord skip peak logic. 
-* Used click.Path for make_offer command filename (fixes #10920) -* Handle when xch_target_address in config doesn't decode correctly (fixes #16995) -* Delete unconfirmed Clawback TX -* tighten up the check for duplicate UnfinishedBlocks before requesting that block -* Optimized Datalayer `get_key_by_node` -* Added test for observance of melted CAT balance (fixes #17727) -* increase backwards compatibility by using default values for peer file path -* Added `--skip-keyring` option to `chia start` and use in GUI (fixes #17848) - -### Added -* Added Python 3.12 support -* Added new subscription and wallet sync protocol support (will be used by the wallet in future releases) -* Added Chip-25 Message Conditions support (https://github.com/Chia-Network/chips/pull/98) -* Added support for HTTP redirect for the pool url (thanks @felixbrucker) -* Added `use_delta_sync` option for faster wallet sync (thanks @felixbrucker) -* Added Datalayer RPC pagination. -* Added Datalayer multiple batch updates with `submit_on_chain` option. -* Added `get_network_info` RPC to daemon -* Added `new_unfinished_block2` support in Chia seeder -* Added Hint support for SpendSim - -### Changed -* Transition `FullBlock`, `BlockRecord`, `CoinSpend`, and `HeaderBlock` to rust -* Move tests - all tests and infrastructure are now included in the chia-blockchain package under `chia/_tests` -* Remove `Announcement` class in favor of `Condition` subclasses -* Remove `ignore_max_send_amount` -* Use `psutil.cpu_affinity()` instead of `os.cpu_count()` -* Stop automatic transaction pushing by wallets -* Unify transaction pushing -* For testing purposes added support for non-ssl rpc clients -* Return TXs from CATWallet and Offer creation -* Remove all install.sh code that installs python and leave it to the user to install separately -* Remove old `unhashable` special case in `Streamable` -* Optimize `validate_removals()` -* Remove support for migrating peers from legacy file format -* Set unique peer filenames when swapping to/from testnets -* Cleaner cli output for rpc client fetch errors -* Optimized Datalayer subscription handling by using a `QueuedAsyncPool` for `DataLayer.periodically_manage_data()` -* Update README.md links for wiki & faq sunset -* Update README formatting and links (thanks @bknox83) -* Turned concatenation of strings to f-strings (thanks @eukub) -* Remove dead code in `multiprocess_validation` -* Improve logging of the height-to-hash and sub-epoch-summaries cache -* Pass full version in `Handshake` (thanks @felixbrucker) -* Separate protocol versions for full_node, farmer, harvester, wallet -* Optimized v1 to v2 DB upgrade -* Datalayer: Avoid manage data loop delay for self subscriptions -* Datalayer: Don't download DAT files that are already on disk -* Datalayer: `get_proof` optimizations - use get_ancestors_optimized -* Datalayer: Optimize insert/upsert/delete by using `get_node_by_key` -* Datalayer: stop using fee config setting and remove from initial config -* Datalayer: Optimize clean_node_table's query and speedup by leveraging relaxed foreign_keys -* Enabled compression for cli rpm -* Bump `chia_rs` to `0.6.1` -* Bump `clvm_tools` to `0.4.9` -* Bump `chiavdf` to `1.1.4` -* Bump `chiapos` to `2.0.4` -* Bump `clvm` to `0.9.9` -* Bump `aiohttp` to `3.9.2` -* Bump `anyio` to `4.3.0` -* Bump `boto3` to `1.34.46` -* Bump `aiosqlite` to `0.20.0` -* Bump `colorlog` to `6.8.2` -* Bump `cryptography` to `42.0.5` -* Bump `keyring` to `24.3.1` -* Bump `dnspython` to `2.5.0` -* Bump `watchdog` to `4.0.0` -* 
Bump `dnslib` to `0.9.24` -* Bump `typing-extensions` to `4.10.0` + +- Fixed `Install.ps1` for PowerShell 7.4 +- Fixed readability of `Could not find parent coin` error log by printing hex and not bytes +- Fixed some shutdown log spam by ensuring signal objects for signal handlers (fixes #17578) +- Fixed negative plot sync durations not crashing the harvester (fixes #15027) (thanks @felixbrucker) +- Fixed log spam by only logging warnings about protocol mismatches for farmer and harvester +- Fixed log spam by logging rollbacks only if heights are actually deleted +- Fixed DID update metadata issue (fixes #17412) +- Fixed error codes and add more test coverage for message conditions +- Fixed non-development source install +- Fixed reorg from 0 +- Fixed (again) Datalayer download banning +- Improved timelord skip peak logic. +- Used click.Path for make_offer command filename (fixes #10920) +- Handle when xch_target_address in config doesn't decode correctly (fixes #16995) +- Delete unconfirmed Clawback TX +- tighten up the check for duplicate UnfinishedBlocks before requesting that block +- Optimized Datalayer `get_key_by_node` +- Added test for observance of melted CAT balance (fixes #17727) +- increase backwards compatibility by using default values for peer file path +- Added `--skip-keyring` option to `chia start` and use in GUI (fixes #17848) + +### Added + +- Added Python 3.12 support +- Added new subscription and wallet sync protocol support (will be used by the wallet in future releases) +- Added Chip-25 Message Conditions support (https://github.com/Chia-Network/chips/pull/98) +- Added support for HTTP redirect for the pool url (thanks @felixbrucker) +- Added `use_delta_sync` option for faster wallet sync (thanks @felixbrucker) +- Added Datalayer RPC pagination. +- Added Datalayer multiple batch updates with `submit_on_chain` option. 
+- Added `get_network_info` RPC to daemon +- Added `new_unfinished_block2` support in Chia seeder +- Added Hint support for SpendSim + +### Changed + +- Transition `FullBlock`, `BlockRecord`, `CoinSpend`, and `HeaderBlock` to rust +- Move tests - all tests and infrastructure are now included in the chia-blockchain package under `chia/_tests` +- Remove `Announcement` class in favor of `Condition` subclasses +- Remove `ignore_max_send_amount` +- Use `psutil.cpu_affinity()` instead of `os.cpu_count()` +- Stop automatic transaction pushing by wallets +- Unify transaction pushing +- For testing purposes added support for non-ssl rpc clients +- Return TXs from CATWallet and Offer creation +- Remove all install.sh code that installs python and leave it to the user to install separately +- Remove old `unhashable` special case in `Streamable` +- Optimize `validate_removals()` +- Remove support for migrating peers from legacy file format +- Set unique peer filenames when swapping to/from testnets +- Cleaner cli output for rpc client fetch errors +- Optimized Datalayer subscription handling by using a `QueuedAsyncPool` for `DataLayer.periodically_manage_data()` +- Update README.md links for wiki & faq sunset +- Update README formatting and links (thanks @bknox83) +- Turned concatenation of strings to f-strings (thanks @eukub) +- Remove dead code in `multiprocess_validation` +- Improve logging of the height-to-hash and sub-epoch-summaries cache +- Pass full version in `Handshake` (thanks @felixbrucker) +- Separate protocol versions for full_node, farmer, harvester, wallet +- Optimized v1 to v2 DB upgrade +- Datalayer: Avoid manage data loop delay for self subscriptions +- Datalayer: Don't download DAT files that are already on disk +- Datalayer: `get_proof` optimizations - use get_ancestors_optimized +- Datalayer: Optimize insert/upsert/delete by using `get_node_by_key` +- Datalayer: stop using fee config setting and remove from initial config +- Datalayer: Optimize clean_node_table's query and speedup by leveraging relaxed foreign_keys +- Enabled compression for cli rpm +- Bump `chia_rs` to `0.6.1` +- Bump `clvm_tools` to `0.4.9` +- Bump `chiavdf` to `1.1.4` +- Bump `chiapos` to `2.0.4` +- Bump `clvm` to `0.9.9` +- Bump `aiohttp` to `3.9.2` +- Bump `anyio` to `4.3.0` +- Bump `boto3` to `1.34.46` +- Bump `aiosqlite` to `0.20.0` +- Bump `colorlog` to `6.8.2` +- Bump `cryptography` to `42.0.5` +- Bump `keyring` to `24.3.1` +- Bump `dnspython` to `2.5.0` +- Bump `watchdog` to `4.0.0` +- Bump `dnslib` to `0.9.24` +- Bump `typing-extensions` to `4.10.0` ### Known Issues -* Please be aware that logging at `DEBUG` log level may log your local keyring passphrase to the log file. Note this is **not** your key mnemonic. + +- Please be aware that logging at `DEBUG` log level may log your local keyring passphrase to the log file. Note this is **not** your key mnemonic. 
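For context on the `DEBUG` caveat above: the keyring passphrase can only end up in the log file when the log level is raised to `DEBUG`. A minimal, illustrative `config.yaml` sketch follows; the `logging.log_level` key shown here is assumed from the standard configuration layout and may differ between releases.

```yaml
# Illustrative excerpt of config.yaml (not part of this patch)
logging:
  log_level: INFO # keeping the level at INFO/WARNING avoids writing the keyring passphrase to debug.log
```

The level can usually also be changed from the CLI, e.g. `chia configure --set-log-level INFO`, rather than editing the file by hand.
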
## 2.2.1 Chia blockchain 2024-03-4 ### Fixed -* Fixed issue with finding bladebit and madmax plotters in CLI and GUI (thanks @nanofarmer) -* Fixed issue with banning peers due to incorrect `INVALID_TRANSACTIONS_FILTER_HASH` and `INVALID_BLOCK_COST` log errors (#17620) + +- Fixed issue with finding bladebit and madmax plotters in CLI and GUI (thanks @nanofarmer) +- Fixed issue with banning peers due to incorrect `INVALID_TRANSACTIONS_FILTER_HASH` and `INVALID_BLOCK_COST` log errors (#17620) ## 2.2.0 Chia blockchain 2024-02-28 -* Thanks to @bhorvitz for major help debugging a performance issue during coin DB lookup - -### Fixed -* Fix TX amount calculation in trade manager (fixes #16842) -* Subscribe to DIDs that come into wallet (fixes #17242) -* Remove duplicate short option from make_offer command (fixes #17371) -* add `SerializedProgram.to()` to simplify some code -* include information for `setuptools_scm` in git archives -* fix type mismatch with `Optional[bytes]` and `bytes` in `wallet/conditions.py` -* fixed typo in `get_coin_record_by_name` docstring (thanks @Abakrombie) -* Fixed readme links (thanks @Abakrombie) -* DL: Don't allow mirrors with empty urls (fixes #16920) -* DL: Improve input for CLI `add_missing_files` (fixes #17039) -* DL: Use unsubscribe queue to relax subscriptions lock -* DL: Use Datalayer banning logic for HTTP download failures -* extend the mempool tests for timelocks, and improve error codes -* extend measured sizes for plot check with value for larger K sizes (thanks @neurosis69) -* Add a few missing type annotations -* Log string header_hash on long validation warnings -* Fix sorted for dictionary keys of both bytes/xch -* Fixed an issue where `chia wallet did transfer` command mistreats the type of `fee` -* Fix signage point message for remote harvesters with large numbers of pools -* undo BlockRecord cache insert, when DB fails -* Warn if running `install-plotter.sh` as root - -### Added -* Support for third-party, farmer-rewarded, Harvesters (Chip-22) -* Singleton fast forward -* Verify p2 delegated conditions signatures and add a new SigningMode for Tangem cards (thanks @MarvinQuevedo) -* DL: add upsert action -* DL: Add support for generating and verifying DataLayer Proofs of Inclusions `get_proof` and `verify_proof` -* Improve transparency of what full nodes are doing and where they spend their time with additional Mempool logging -* add feature to profile just the block validation -* Add `--override` flag to `make_offer` -* Add full node RPC `get_aggsig_additional_data` to get the aggsig additional data -* Add fork height & rolled_back_records to block event for metrics -* extend Block validation timing logs to measure just the CLVM and conditions -* Add support for defining a list of full node peers to connect to (thanks @felixbrucker) -* Add preliminary support for getting coin states in batches -* improve mempool reorg logic when the peak is a non-transaction block -* Add `additions` and `removals` to `get_offer_summary` API response (thanks @mikehw) -* improve handling of `UnfinishedBlock`s -* Add testnet11 constants to config if missing when configuring to run on testnet -* We have added several new translations in this release. 
Thanks to WNFT, advlive, hezoushe - -### Changed -* reorg optimizations -* bump `chia_rs` to `0.4.1` -* initiate phasing out of the `coin_solutions` name in JSON structs -* slight simplification to `get_min_fee_rate()` -* Remove `coin_solutions` from `SpendBundle` entirely -* use rust types for `VDFInfo`, `VDFProof` and `ClassgroupElement` -* evict entries continuously from `seen_unfinished_blocks` -* move `tools/legacy_keyring.py` to `chia/legacy/keyring.py` -* Rust `proof-of-space`, `reward chain` and `foliage` types -* DL: Compress `get_keys_values` output by hash. -* replace hardcoded value for `db_readers` (thanks @neurosis69) -* use rust types for `slots`, `SubEpochSummary` and `SubEpochData` -* Update default testnet to testnet11 -* remove old work-around for a bug in version `1.1.4` and earlier -* use rust implementation of `SerializedProgram` -* Rework block fill logic to fill blocks with more SpendBundles (transactions) -* fix typo in logging -* increase farmer block fill rate to 60% -* Force the use of `coin_puzzle_hash` index to `get_unspent_lineage_info_for_puzzle_hash` +- Thanks to @bhorvitz for major help debugging a performance issue during coin DB lookup + +### Fixed + +- Fix TX amount calculation in trade manager (fixes #16842) +- Subscribe to DIDs that come into wallet (fixes #17242) +- Remove duplicate short option from make_offer command (fixes #17371) +- add `SerializedProgram.to()` to simplify some code +- include information for `setuptools_scm` in git archives +- fix type mismatch with `Optional[bytes]` and `bytes` in `wallet/conditions.py` +- fixed typo in `get_coin_record_by_name` docstring (thanks @Abakrombie) +- Fixed readme links (thanks @Abakrombie) +- DL: Don't allow mirrors with empty urls (fixes #16920) +- DL: Improve input for CLI `add_missing_files` (fixes #17039) +- DL: Use unsubscribe queue to relax subscriptions lock +- DL: Use Datalayer banning logic for HTTP download failures +- extend the mempool tests for timelocks, and improve error codes +- extend measured sizes for plot check with value for larger K sizes (thanks @neurosis69) +- Add a few missing type annotations +- Log string header_hash on long validation warnings +- Fix sorted for dictionary keys of both bytes/xch +- Fixed an issue where `chia wallet did transfer` command mistreats the type of `fee` +- Fix signage point message for remote harvesters with large numbers of pools +- undo BlockRecord cache insert, when DB fails +- Warn if running `install-plotter.sh` as root + +### Added + +- Support for third-party, farmer-rewarded, Harvesters (Chip-22) +- Singleton fast forward +- Verify p2 delegated conditions signatures and add a new SigningMode for Tangem cards (thanks @MarvinQuevedo) +- DL: add upsert action +- DL: Add support for generating and verifying DataLayer Proofs of Inclusions `get_proof` and `verify_proof` +- Improve transparency of what full nodes are doing and where they spend their time with additional Mempool logging +- add feature to profile just the block validation +- Add `--override` flag to `make_offer` +- Add full node RPC `get_aggsig_additional_data` to get the aggsig additional data +- Add fork height & rolled_back_records to block event for metrics +- extend Block validation timing logs to measure just the CLVM and conditions +- Add support for defining a list of full node peers to connect to (thanks @felixbrucker) +- Add preliminary support for getting coin states in batches +- improve mempool reorg logic when the peak is a non-transaction block +- Add `additions` 
and `removals` to `get_offer_summary` API response (thanks @mikehw) +- improve handling of `UnfinishedBlock`s +- Add testnet11 constants to config if missing when configuring to run on testnet +- We have added several new translations in this release. Thanks to WNFT, advlive, hezoushe + +### Changed + +- reorg optimizations +- bump `chia_rs` to `0.4.1` +- initiate phasing out of the `coin_solutions` name in JSON structs +- slight simplification to `get_min_fee_rate()` +- Remove `coin_solutions` from `SpendBundle` entirely +- use rust types for `VDFInfo`, `VDFProof` and `ClassgroupElement` +- evict entries continuously from `seen_unfinished_blocks` +- move `tools/legacy_keyring.py` to `chia/legacy/keyring.py` +- Rust `proof-of-space`, `reward chain` and `foliage` types +- DL: Compress `get_keys_values` output by hash. +- replace hardcoded value for `db_readers` (thanks @neurosis69) +- use rust types for `slots`, `SubEpochSummary` and `SubEpochData` +- Update default testnet to testnet11 +- remove old work-around for a bug in version `1.1.4` and earlier +- use rust implementation of `SerializedProgram` +- Rework block fill logic to fill blocks with more SpendBundles (transactions) +- fix typo in logging +- increase farmer block fill rate to 60% +- Force the use of `coin_puzzle_hash` index to `get_unspent_lineage_info_for_puzzle_hash` ## 2.1.4 Chia blockchain 2024-01-10 ### Fixed -* Update chia_rs to 0.2.15 for AMD K10 architecture (fixes #16386) + +- Update chia_rs to 0.2.15 for AMD K10 architecture (fixes #16386) ### Changed -* improved CPU usage due to tight loop in `send_transaction()` -* improve performance of `total_mempool_fees()` and `total_mempool_cost()` -* reduced the default maximum peer count to 40 from 80 (only applies to new configs) -* changed to `normal` SQlite db sync option (previously was `full`) -* reduced the mempool size to 10 blocks from 50 blocks (improves performance) -* improve performance of the mempool by batch fetching items from the db +- improved CPU usage due to tight loop in `send_transaction()` +- improve performance of `total_mempool_fees()` and `total_mempool_cost()` +- reduced the default maximum peer count to 40 from 80 (only applies to new configs) +- changed to `normal` SQlite db sync option (previously was `full`) +- reduced the mempool size to 10 blocks from 50 blocks (improves performance) +- improve performance of the mempool by batch fetching items from the db ## 2.1.3 Chia blockchain 2023-12-18 ### Fixed -* Fixed a regression in 2.1.2 that could cause a farmer to fail to be able to create a block in some cases + +- Fixed a regression in 2.1.2 that could cause a farmer to fail to be able to create a block in some cases ## 2.1.2 Chia blockchain 2023-12-13 ### Fixed -* Fix deep reorgs and add tests -* Reduce possible Signage Point bursts by forwarding 4 most recent cached SPs only -* Fix condition serialization in RPC client -* Fix DID resync to not create DID wallets that don't belong to the current key -* Fix `get_block_spends` to work correctly post hard-fork -* Shutdown on startup failure and log to the log if possible -* fix issue with syncing testnet10 from 0 -* Chunk SQLite query for old TR/TX conversion (fixes #16589) -* Allow set_status to overwrite trade in store (fixes #16461) -* Add cache to wallet node preventing resend of processing TX -* Correct `FullNodeDiscovery.pending_tasks` typo without `s` -* Fix `chia wallet coins list` by adding NFT, DID, DAO_CAT to wallets denominated in mojos -* generalize JSON serializer -* Fix possible 
peak height race -* Fix invalid sync request -* request blocks in batches of 32 instead of 33 (saves 3% bandwidth) -* Fix `get_block_generator` fork detection -* Fix set_status accidental arg (fixes #16817) -* Fix issues with upgrading Chia via RPM by claiming ownership of `/opt/chia` in the RPM -* clean out `/opt/chia` before install and after removal of rpm - -### Added -* Allow DApps to use WalletConnect to sign customized puzzles by extending sign APIs -* Add support for lists of peers in the config (thanks @felixbrucker) -* Update to support looking up mnemonic by just the first 4 letters of each word -* Allow the daemon to use TLS v1.2 via config flag (thanks @dkackman) -* Add systemd init files to CLI-only Linux packages -* DL: remove data from the DB on unsubscribe - -### Changed -* ban peers for 10 minutes when violating consensus rules -* Remove `tx_endpoint` from `select_coins` -* DID wallet coin_added by @ytx1991 in https://github.com/Chia-Network/chia-blockchain/pull/16256 -* Use network overrides for default port for WalletPeers -* Improve clarity of legacy support policy language -* Add config option to set rpc timeout and use it for simulator tests -* rename `ClassgroupElement.from_bytes()` -* Optimize CRCAT trades -* harmonize `SerializedProgram` with `Program` -* Swap some info logs in seeder to warning -* Distinguish `insufficient_partials` from `invalid_partials` -* transition away from `__bytes__` conversion for fixed-size integers -* Use BLS from `chia_rs` and stop using `blspy` wheel in chia-blockchain -* simplify the interface to `mempool_manager.new_peak()` -* reduce redundant calls to compute the header hash -* Change `-h` to `-k` for `--key` flag for datalayer `get_value` cli command -* Update `chia_rs` to `0.2.13` -* Update `clvm_tools` to `0.4.7` -* Update `aiohttp` to `3.9.1` (fixes a WebSocket bug introduced in 3.9.0) -* Change `chia show keys --show-mnemonic-seed` to also show farmer private key (thanks xchdata1) -* Adjust ban times when unable to download properly DL DAT files -* return `List[TransactionRecord]` from nft bulk mint functions -* DL: delete full files when subscribed to a datastore per config +- Fix deep reorgs and add tests +- Reduce possible Signage Point bursts by forwarding 4 most recent cached SPs only +- Fix condition serialization in RPC client +- Fix DID resync to not create DID wallets that don't belong to the current key +- Fix `get_block_spends` to work correctly post hard-fork +- Shutdown on startup failure and log to the log if possible +- fix issue with syncing testnet10 from 0 +- Chunk SQLite query for old TR/TX conversion (fixes #16589) +- Allow set_status to overwrite trade in store (fixes #16461) +- Add cache to wallet node preventing resend of processing TX +- Correct `FullNodeDiscovery.pending_tasks` typo without `s` +- Fix `chia wallet coins list` by adding NFT, DID, DAO_CAT to wallets denominated in mojos +- generalize JSON serializer +- Fix possible peak height race +- Fix invalid sync request +- request blocks in batches of 32 instead of 33 (saves 3% bandwidth) +- Fix `get_block_generator` fork detection +- Fix set_status accidental arg (fixes #16817) +- Fix issues with upgrading Chia via RPM by claiming ownership of `/opt/chia` in the RPM +- clean out `/opt/chia` before install and after removal of rpm + +### Added + +- Allow DApps to use WalletConnect to sign customized puzzles by extending sign APIs +- Add support for lists of peers in the config (thanks @felixbrucker) +- Update to support looking up mnemonic by just 
the first 4 letters of each word +- Allow the daemon to use TLS v1.2 via config flag (thanks @dkackman) +- Add systemd init files to CLI-only Linux packages +- DL: remove data from the DB on unsubscribe + +### Changed + +- ban peers for 10 minutes when violating consensus rules +- Remove `tx_endpoint` from `select_coins` +- DID wallet coin_added by @ytx1991 in https://github.com/Chia-Network/chia-blockchain/pull/16256 +- Use network overrides for default port for WalletPeers +- Improve clarity of legacy support policy language +- Add config option to set rpc timeout and use it for simulator tests +- rename `ClassgroupElement.from_bytes()` +- Optimize CRCAT trades +- harmonize `SerializedProgram` with `Program` +- Swap some info logs in seeder to warning +- Distinguish `insufficient_partials` from `invalid_partials` +- transition away from `__bytes__` conversion for fixed-size integers +- Use BLS from `chia_rs` and stop using `blspy` wheel in chia-blockchain +- simplify the interface to `mempool_manager.new_peak()` +- reduce redundant calls to compute the header hash +- Change `-h` to `-k` for `--key` flag for datalayer `get_value` cli command +- Update `chia_rs` to `0.2.13` +- Update `clvm_tools` to `0.4.7` +- Update `aiohttp` to `3.9.1` (fixes a WebSocket bug introduced in 3.9.0) +- Change `chia show keys --show-mnemonic-seed` to also show farmer private key (thanks xchdata1) +- Adjust ban times when unable to download properly DL DAT files +- return `List[TransactionRecord]` from nft bulk mint functions +- DL: delete full files when subscribed to a datastore per config ## 2.1.1 Chia blockchain 2023-10-11 ### Fixed + - Changed electron version for GUI to 25.9.0 to fix whitescreen issues seen on some linux systems (fixes #16538) ## 2.1.0 Chia blockchain 2023-10-05 ### Added + - Credential Restricted CATs - Add timelock information to Trades and Transactions - Add ergonomic timelock parsing to RPCs @@ -370,6 +394,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Bladebit Hybrid disk mode ### Changed + - Remove CAT1 UX guards - Dedup offer cancellation logic - upgrade electron-builder to 24.6.3 and Lerna to 7.1.3 @@ -394,6 +419,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Update install-gui.sh to check Node 18 and npm 9 ### Fixed + - Fixed python3-venv in install.sh (thanks @d1m1trus) - Change include_standard_libraries for CLVM compilation default to True - add dust warning message to chia coins commands & cleanup code @@ -419,7 +445,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - add fee for cat creation - max_coin_amount should default to None in wallet send command - Add extra_conditions to special offer making -- bump chia_rs to include bugfix for new AGG_SIG_* conditions in mempool mode +- bump `chia_rs` to include bugfix for new `AGG_SIG\*` conditions in mempool mode - Fix `chia farm summary` aborting early if no local full node present (fixes #16164) (thanks @xchdata1) - fix typo in PendingTxCache - rename `chia data add_missing_files` `-f`/`--foldername` to `-d`/`--directory` @@ -428,6 +454,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Fix glitch NFT wallet test ### Removed + - Support for MacOS 10.14 and 10.15 - Support for Chia database schema version 1 - Support for minting CATs via RPC @@ -463,7 +490,7 @@ macOS 11 (Big Sur) is deprecated. 
This release (2.4.0) will be the last release - Move CAT_MOD from cat_loader -> cat_utils - Use a more aggresive activation schedule on testnet10 - Full_node: More set usage in subscription code -- Rename exclude_coin_* -> excluded_coin_* for consistency +- Rename `exclude_coin_*` -> `excluded_coin_*` for consistency - Add `**kwargs` to all `generate_signed_transaction` definitions - Full_node: Add `max_height` to `CoinStore.get_coin_states_by_ids` - Util: Some tweaks to `StructStream` and sized ints @@ -556,6 +583,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release ## 1.8.2 Chia blockchain 2023-06-28 ### Added + - Add `chia wallet vcs` command for Verifiable Credential operations - Add `chia wallet clawback` command for clawback operations - Add `chia wallet did` commands `get_details`, `update_metadata`, `find_lost`, `message_spend`, `transfer` @@ -565,6 +593,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Add `confirmed` boolean to wallet RPC `get_transactions` ### Changed + - Identical spend aggregation - CAT wallet now will hint to CAT change - Move to Discord in docs and install scripts @@ -579,6 +608,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Updated `chiavdf` to `1.0.9` ### Fixed + - Fix `chia wallet make_offer` short-option collision on `-r` (Fixes #14874) (Thanks @yyolk) - Fix `GENERATOR_MOD2` to have the same cost as `GENERATOR_MOD` - Subscribe to the change children of CATs @@ -593,11 +623,13 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release ## 1.8.1 Chia blockchain 2023-05-17 ### Changed + - Updated testnet softfork height so softfork rules take effect on testnet immediately - Move to Discord in docs and install scripts (#15193) - Optimize compact proofs ### Fixed + - Issue where CLI only listed first 50 NFTs by hardcoding `num` param when listing NFTs from CLI - Issue where wallet might display `RuntimeError: dictionary changed size during iteration` by avoiding dict changes while iterating in `handle_nft` - Issue where node had trouble keeping peers with `assert self.peak is not None` error by not adding transactions to the mempool before it has a valid peak (fixes #15217) @@ -605,6 +637,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release ## 1.8.0 Chia blockchain 2023-05-03 ### Added + - Added `chia completion` command - Added wallet_removed to `state_changes` messages to support wallet removal in GUI - Add support to `cat_spend` RPC for running TAIL @@ -613,6 +646,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - DataLayer plugin support and infrastructure ### Changed + - Fix soft fork to 60 days - Don't subscribe to all coin ids in the DB - Handle trade coins in the `try` block of `new_coin_state` @@ -633,6 +667,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - List the columns for `INSERT` into `coin_record` ### Fixed + - Disconnect untrusted peers if we find a trusted synced one - Only compile CLVM if source newer than hex - Fixed windows issues with passphrase prompt on CLI by flushing prompt (Fixes #14889) @@ -659,6 +694,7 @@ macOS 11 (Big Sur) is deprecated. 
This release (2.4.0) will be the last release ## 1.7.1 Chia blockchain 2023-03-22 ### Added + - `get_transaction_memo` wallet RPC - `set_wallet_resync_on_startup` wallet RPC to reset wallet sync data on wallet restart - `nft_count_nfts` wallet RPC - counts NFTs per wallet or for all wallets @@ -671,6 +707,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - `curry` Chialisp library replaces `curry-and-treehash` ### Changed + - `chia show -f` changed to output proper JSON - `Rate limiting` log messages are themselves rate limited - Notified GUI when wallets are removed @@ -686,6 +723,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Changed mempool backend to use an in-memory SQLite DB ### Fixed + - Quieted wallet log output for `Record: ... not in mempool` (fixes #14452) - Quieted log output for `AttributeError: 'NoneType' object has no attribute '_get_extra_info` - Reduced log output for `Using previous generator for height` @@ -701,6 +739,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Improved the accuracy of the wallet sync status indication ### Deprecated + - `curry-and-treehash` Chialisp library replaced by new `curry` library ## 1.7.0 Chia blockchain 2023-02-15 @@ -714,7 +753,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Add gzip support to DataLayer download client (Thanks, @Chida82!) - Add proxy support to DataLayer download client (Thanks again, @Chida82!) - Add `get_timestamp_for_height` Wallet RPC for converting heights to timestamps -- Add `tools/legacy_keyring.py` to allow migration from the removed old key storage format. Available only from source installations. +- Add `tools/legacy_keyring.py` to allow migration from the removed old key storage format. Available only from source installations. - Add Arch Linux to install-gui.sh script (Thanks, @DaOneLuna!) - Add a `daemon_heartbeat` setting to config.yaml - add `trusted_max_subscribe_items` and `wallet:trusted_peers` to config.yaml @@ -1190,7 +1229,7 @@ macOS 11 (Big Sur) is deprecated. This release (2.4.0) will be the last release - Added RPCs for NFT (see ) - Enable stricter mempool rule when dealing with multiple extra arguments - Added a retry when loading pool info from a pool at 2 minute intervals -- Added CLI options `--sort-by-height` and –sort-by-relevance` to `chia wallet get_transactions` +- Added CLI options `--sort-by-height` and `-–sort-by-relevance` to `chia wallet get_transactions` - Harvester: Introduce `recursive_plot_scan` - Add libgmp-dev to Bladebit installation - thanks to @TheLastCicada - Add support for multiple of the same CAT in aggregate offers - Thanks to @roseiliend @@ -1414,7 +1453,7 @@ There is a known issue where harvesters will not reconnect to the farmer automat - Added new CLI option, chia keys derive, to allow deriving any number of keys in various ways. This is particularly useful to do an exhaustive search for a given address using chia keys derive search. - Div soft fork block height set to 2,300,000. - Added the ability to add an optional fee for creating and changing plot NFTs. -- Added *multiprocessing_start_method:* entry in config.yaml that allows setting the python *start method* for multiprocessing (default is *spawn* on Windows & MacOS, *fork* on Unix). 
+- Added `multiprocessing_start_method:` entry in config.yaml that allows setting the python _start method_ for multiprocessing (default is `spawn` on Windows & MacOS, `fork` on Unix). - Added option to "Cancel transaction" accepted offers that are stuck in "pending". ### Changed @@ -1479,7 +1518,7 @@ There is a known issue where harvesters will not reconnect to the farmer automat - If you start with wallet mode and then switch to farmer mode and back to wallet mode, the full node will continue to sync in the background. To get the full node to stop syncing after switching to wallet mode, you will need to close the Chia and relaunch the Chia app. - Wallets with large number of transactions or large number of coins will take longer to sync (more than a few minutes), but should take less time than a full node sync. It could fail in some cases. - Huge numbers cannot be put into amount/fee input for transactions in the GUI. -- Some Linux systems experience excessive memory usage with the value *default*/*python_default*/*fork* configured for *multiprocessing_start_method:*. Setting this value to *spawn* may produce better results, but in some uncommon cases, is know to cause crashes. +- Some Linux systems experience excessive memory usage with the value `default`/`python_default`/`fork` configured for `multiprocessing_start_method:`. Setting this value to `spawn` may produce better results, but in some uncommon cases, is know to cause crashes. - Sending a TX with too low of a fee can cause an infinite spinner in the GUI when the mempool is full. - Workaround: Restart the GUI, or clear unconfirmed TX. - Claiming rewards when self-pooling using CLI will show an error message, but it will actually create the transaction. @@ -1489,7 +1528,7 @@ There is a known issue where harvesters will not reconnect to the farmer automat ### Added - Farmers rejoice: today's release integrates two plotters in broad use in the Chia community: Bladebit, created by @harold-b, and Madmax, created by @madMAx43v3r. Both of these plotters bring significant improvements in plotting time. More plotting info [here](https://github.com/Chia-Network/chia-blockchain/wiki/Alternative--Plotters). -- This release also includes several important performance improvements as a result of last weekends "Dust Storm", with two goals in mind: make sure everyone can farm at all times, and improve how many transactions per second each node can accept, especially for low-end hardware. Please know that these optimizations are only the first wave in a series of many over the next few releases to help address this going forward. While the changes we have implemented in this update may not necessarily solve for *every* possible congestion scenario, they should go a long way towards helping low-end systems perform closer to expectations if this happens again. +- This release also includes several important performance improvements as a result of last weekends "Dust Storm", with two goals in mind: make sure everyone can farm at all times, and improve how many transactions per second each node can accept, especially for low-end hardware. Please know that these optimizations are only the first wave in a series of many over the next few releases to help address this going forward. While the changes we have implemented in this update may not necessarily solve for _every_ possible congestion scenario, they should go a long way towards helping low-end systems perform closer to expectations if this happens again. 
- Performance improvements for nodes to support higher transaction volumes, especially for low powered devices like RaspBerry Pi. Full details at [#9050](https://github.com/Chia-Network/chia-blockchain/pull/9050). - Improved multi-core usage through process pools. - Prioritized block validation. @@ -1535,7 +1574,7 @@ We have some great improvements in this release: We launched our migration of ke ### Changed -- Truncate points_[found,acknowledged]_24h to 24 hours at each signage point. +- Truncate points\_[found,acknowledged]\_24h to 24 hours at each signage point. - Improved reliability of test_farmer_harvester_rpc.py, by increasing the interval between harvester checks, which should avoid spamming logs with excessive plot refreshing and cache updates. - Thanks @cross for change that allows using IPv6 address in config.yaml for remote harvesters and other chia services. - Change to stop creating unused indexes in block_records and full_blocks tables. @@ -1657,15 +1696,15 @@ Today we’re releasing version 1.2.6 to address a resource bug with nodes, and - Added an option to sign bytes as well as UTF-8 strings, which is particularly helpful if you're writing Chialisp puzzles that require signatures and you want to test them without necessarily writing a whole python script for signing the relevant data. - Added a first version of .pre-commit-config.yaml and applied the changes required by the following initial hooks in separate commits. To use this you need to install pre-commit, see . - We have added many new translations in this release based on community -submissions. Thanks to @RuiZhe for Chinese, Traditional; @HansCZ for Czech; -@LUXDAD for English, Australia; @f00b4r for Finnish; @jimkoen, @ruvado for German; @Arielzikri for Hebrew; @A-Caccese for Italian; @Hodokami for Japanese; @LUXDAD for Latvian; @vaexperience for Lithuanian; @LUXDAD for Russian; @juands1644 for Spanish, Argentina; @MrDyngrak, @ordtrogen for Swedish; @richeyphu for Thai; @Ansugo, @baturman for Turkish. + submissions. Thanks to @RuiZhe for Chinese, Traditional; @HansCZ for Czech; + @LUXDAD for English, Australia; @f00b4r for Finnish; @jimkoen, @ruvado for German; @Arielzikri for Hebrew; @A-Caccese for Italian; @Hodokami for Japanese; @LUXDAD for Latvian; @vaexperience for Lithuanian; @LUXDAD for Russian; @juands1644 for Spanish, Argentina; @MrDyngrak, @ordtrogen for Swedish; @richeyphu for Thai; @Ansugo, @baturman for Turkish. ### Changed -- Thanks @altendky for Correct * to ** kwargs unpacking in time_out_assert(). +- Thanks @altendky for Correct `*` to `**` kwargs unpacking in time_out_assert(). - Thanks @altendky for changing the default to paginate to chia wallet get_transactions to address cases such as piping and output redirection to a file where the command previously just hung while waiting for the user to press c for the next page. - Removed commented-out debug breakpoints. -- Enabled Rust condition checker to add the ability to parse the output conditions from a generator program in Rust. It also validates some of the conditions in Rust. +- Enabled Rust condition checker to add the ability to parse the output conditions from a generator program in Rust. It also validates some of the conditions in Rust. - Switched IP address lookup to first use Chia's service ip.chia.net. - Made changes so that when creating SSL certificate and private key files, we ensure that files are written with the proper file permissions. 
- Define a new encrypted keyring format to be used to store keys, and which is optionally encrypted to a user-supplied passphrase. GUI for the passphrase will come in an upcoming release. @@ -1704,7 +1743,7 @@ submissions. Thanks to @RuiZhe for Chinese, Traditional; @HansCZ for Czech; - Thanks @aarcro for adding timing metrics to plot check. - Thanks @chadwick2143 for adding the ability to set the port to use for the harvester. - Added more friendly error reporting for peername errors. -- We have added many new translations in this release. Thanks to @L3Sota, @hodokami and @L3Sota for Japanese; @danielrangel6, @memph1x and @dvd101x for Spanish (Mexico); @fsavaget, @semnosao and @ygalvao for Portuguese (Brazilian); @juands1644 for Spanish (Argentina); @darkflare for Portuguese; @wong8888, @RuiZhe, @LM_MA, @ezio20121225, @GRIP123, @11221206 and @nicko1122 for Chinese Traditional; @atomsymbol for Slovak; @SirGeoff and @rolandfarkasCOM for Hungarian; @ordtrogen for Swedish; @HansCZ and @kafkic for Czech; @SupperDog for Chinese Simplified; @baturman and @Ansugo for Turkish; @thebacktrack for Russian; @itservicelukaswinter for German; @saeed508, @Amirr_ezA and @themehran for Persian; @hgthtung for Vietnamese; @f00b4r for Finnish; @IMIMIM for Latvian; @Rothnita and @vanntha85 for Khmer; @Rothnita and @Gammaubl for Thai; @marcin1990 for Polish; @mydienst for Bosnian; @dvd101x and @darkflare for Spanish; @ATSHOOTER for Albanian; @Munyuk81 for Indonesian; @loppefaaret for Danish; @sharjeelaziz and @nzjake for English; @nzjake for English (New Zealand). We apologize if we missed anyone and welcome corrections. +- We have added many new translations in this release. Thanks to @L3Sota, @hodokami and @L3Sota for Japanese; @danielrangel6, @memph1x and @dvd101x for Spanish (Mexico); @fsavaget, @semnosao and @ygalvao for Portuguese (Brazilian); @juands1644 for Spanish (Argentina); @darkflare for Portuguese; @wong8888, @RuiZhe, @LM_MA, @ezio20121225, @GRIP123, @11221206 and @nicko1122 for Chinese Traditional; @atomsymbol for Slovak; @SirGeoff and @rolandfarkasCOM for Hungarian; @ordtrogen for Swedish; @HansCZ and @kafkic for Czech; @SupperDog for Chinese Simplified; @baturman and @Ansugo for Turkish; @thebacktrack for Russian; @itservicelukaswinter for German; @saeed508, @Amirr_ezA and @themehran for Persian; @hgthtung for Vietnamese; @f00b4r for Finnish; @IMIMIM for Latvian; @Rothnita and @vanntha85 for Khmer; @Rothnita and @Gammaubl for Thai; @marcin1990 for Polish; @mydienst for Bosnian; @dvd101x and @darkflare for Spanish; @ATSHOOTER for Albanian; @Munyuk81 for Indonesian; @loppefaaret for Danish; @sharjeelaziz and @nzjake for English; @nzjake for English (New Zealand). We apologize if we missed anyone and welcome corrections. ### Changed @@ -1755,13 +1794,13 @@ submissions. Thanks to @RuiZhe for Chinese, Traditional; @HansCZ for Czech; ### Added - Portable pooled plots are now available using our new plot NFT. These allow you to plot new plots to an NFT that can either self farm or join and leave pools. During development there were changes to the plot NFT so portable pool plots (those made with `-c` option to `chia plots create`) using code from before June 25th are invalid on mainnet. -OG plots made before this release can continue to be farmed side by side with the new portable pool plots but can not join pools using the official pooling protocol. You can learn more as a farmer by checking out the [pool user guide](https://github.com/Chia-Network/chia-blockchain/wiki/Pooling-User-Guide). 
Pool operators and those wanting to understand how the official pooling protocol operates should check out our [pooling implementation reference repository](https://github.com/Chia-Network/pool-reference). If you plan to use plot NFT, all your farmers and harvesters must be on 1.2.0 to function properly for portable pool plots. + OG plots made before this release can continue to be farmed side by side with the new portable pool plots but can not join pools using the official pooling protocol. You can learn more as a farmer by checking out the [pool user guide](https://github.com/Chia-Network/chia-blockchain/wiki/Pooling-User-Guide). Pool operators and those wanting to understand how the official pooling protocol operates should check out our [pooling implementation reference repository](https://github.com/Chia-Network/pool-reference). If you plan to use plot NFT, all your farmers and harvesters must be on 1.2.0 to function properly for portable pool plots. - The exact commit after which Plot NFTs should be valid is the 89f7a4b3d6329493cd2b4bc5f346a819c99d3e7b commit (in which `pools.testnet9` branch was merged to main) or 5d62b3d1481c1e225d8354a012727ab263342c0a within the `pools.testnet9` branch. - `chia farm summary` and the GUI now use a new RPC endpoint to properly show plots for local and remote harvesters. This should address issues #6563, #5881, #3875, #1461. - `chia configure` now supports command line updates to peer count and target peer count. - Thank you @gldecurtins for adding logging support for remote syslog. - Thanks to @maran and @Animazing for adding farmer and pool public key display to the RPC. -- We have added translations for Hungarian, Belarusian, Catalan, and Albanian. For Hungarian thanks to @SirGeoff, @azazio @onokaxxx, @rolandfarkasCOM, @HUNDavid , @horvathpalzsolt, @stishun74, @tusdavgaming, @idotitusz, @rasocsabi, @mail.kope, @gsprblnt, @mbudahazi, @csiberius, @tomatos83, @zok42, @ocel0t, @rwtoptomi, @djxpitke, @ftamas85, @zotya0330, @fnni, @kapabeates, @zamery, @viktor.gonczi, @pal.suta, @miv, and @Joeman_. For Belarusian thanks to @shurix83, @haxycgm, and @metalomaniax. For Catalan thank you to @Poliwhirl, @Pep-33, @marqmarti, @meuca, @Guiwdin, @carlescampi, @jairobtx, @Neoares, @darknsis, @augustfarrerasgimeno, and @fornons. Finally for Albanian thanks to @ATSHOOTER and @lakedeejay. We apologize if we missed anyone and welcome corrections. +- We have added translations for Hungarian, Belarusian, Catalan, and Albanian. For Hungarian thanks to @SirGeoff, @azazio @onokaxxx, @rolandfarkasCOM, @HUNDavid , @horvathpalzsolt, @stishun74, @tusdavgaming, @idotitusz, @rasocsabi, @mail.kope, @gsprblnt, @mbudahazi, @csiberius, @tomatos83, @zok42, @ocel0t, @rwtoptomi, @djxpitke, @ftamas85, @zotya0330, @fnni, @kapabeates, @zamery, @viktor.gonczi, @pal.suta, @miv, and @Joeman\_. For Belarusian thanks to @shurix83, @haxycgm, and @metalomaniax. For Catalan thank you to @Poliwhirl, @Pep-33, @marqmarti, @meuca, @Guiwdin, @carlescampi, @jairobtx, @Neoares, @darknsis, @augustfarrerasgimeno, and @fornons. Finally for Albanian thanks to @ATSHOOTER and @lakedeejay. We apologize if we missed anyone and welcome corrections. - Our release process is now fully automated from tagging a release to publishing installers to all of the appropriate locations and now makes the release artifacts available via torrents as well. - All Chia repositories now automatically build M1 wheels and create a new MacOS M1 native installer. - New CLI command `chia plotnft` to manage pools. 
@@ -1776,7 +1815,7 @@ OG plots made before this release can continue to be farmed side by side with th - We have made a host of changes to the GUI to support pooling and to improve the wallet experience. - We updated chiapos to version 1.0.3. This adds parallel reads to GetFullProof. Thanks to @marcoabreu ! We now print target/final directory early in the logs refs and log process ID. Thanks to @grayfallstown ! We are now using Gulrak 1.5.6. -@683280 optimized code in phase1.hpp. @jespino and @mrhacky started migrating to flags instead of booleans parameters for `show_progress` and `nobitfield`. If you are providing third-party tools you may need to make adjustments if relying on the chiapos log. + @683280 optimized code in phase1.hpp. @jespino and @mrhacky started migrating to flags instead of booleans parameters for `show_progress` and `nobitfield`. If you are providing third-party tools you may need to make adjustments if relying on the chiapos log. - Updated chiavdf to version 1.0.2 to fix certain tests. - Windows builds now rely upon Python 3.9 which obviates the fix in 1.1.7. - We are now using miniupnpc version 2.2.2 so that we can support Python 3.9 on Windows. @@ -2013,7 +2052,7 @@ Batch process weight proof epochs in groups of 900 to fit below May 2020 sqlite ### Changed -- The plotter in bitfield mode is much improved in plotting speed (~15% faster than in 1.0.3), now requires 28% less temporary space (238.3 GiB/256 GB), and now uses its maximum memory in phase 1 and only needs 3389MiB for optimal sorting of a k32. Total writes should also be down by about 20%. On almost all machines we expect bitfield to be as fast or faster. For CPUs that predate the [Nehalem architecture](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)), bitfield plotting will not work and you will need to use no bitfield. Those CPUs were generally designed before 2010. +- The plotter in bitfield mode is much improved in plotting speed (~15% faster than in 1.0.3), now requires 28% less temporary space (238.3 GiB/256 GB), and now uses its maximum memory in phase 1 and only needs 3389MiB for optimal sorting of a k32. Total writes should also be down by about 20%. On almost all machines we expect bitfield to be as fast or faster. For CPUs that predate the [Nehalem architecture](), bitfield plotting will not work and you will need to use no bitfield. Those CPUs were generally designed before 2010. - The `src` directory in chia-blockchain has been changed to `chia` to avoid namespace collisions. - GUI install builds have been simplified to rely on one `.spec` file in `chia/` - The weight proof timeout can now be configured in config.yaml. @@ -2306,7 +2345,7 @@ Batch process weight proof epochs in groups of 900 to fit below May 2020 sqlite - The websocket address is no longer displayed in the GUI unless it is running as a remote GUI. Thanks @dkackman ! - `chia plots check` now will continue checking after it finds an error in a plot to the total number of checks you specified. - If you run install-gui.sh or install-timelord.sh without being in the venv, the script will warn you that you need to `. ./activate` and exit with error. -- If you attempt to install on a 32 bit Pi/ARM OS, the installer exits with a helpful error message. You can still fail when running under a 64 bit kernel but using a 32 bit Python 3. +- If you attempt to install on a 32 bit Pi/ARM OS, the installer exits with a helpful error message. You can still fail when running under a 64 bit kernel but using a 32 bit Python 3. 
- The application is now more aware of whether it is running a testnet or mainnet. This impacts wallet's display behavior and certain blockchain validation rules. - Interface improvements for `chia netspace`. - Now that aiosqlite included our upstream improvements we install version 0.17.0. @@ -2352,13 +2391,13 @@ Batch process weight proof epochs in groups of 900 to fit below May 2020 sqlite - We have added Italian, Russian, and Finnish. More to come soon. - There is now remote UI support. [Documents](https://github.com/Chia-Network/chia-blockchain-gui/blob/main/remote.md) will temporarily live in the repository but have moved to the [wiki](https://github.com/Chia-Network/chia-blockchain/wiki/Connecting-the-UI-to-a-remote-daemon). Thanks to @dkackman for this excellent addition! - Added the ability to specify an address for the pool when making plots (-c flag), as opposed to a public key. The block -validation was changed to allow blocks like these to be made. This will enable changing pools in the future, by specifying a smart transaction for your pool rewards. + validation was changed to allow blocks like these to be made. This will enable changing pools in the future, by specifying a smart transaction for your pool rewards. - Added `chia plots check --challenge-start [start]` that begins at a different `[start]` for `-n [challenges]`. Useful when you want to do more detailed checks on plots without restarting from lower challenge values you already have done. Huge thanks to @eFishCent for this and all of the debugging work behind the scenes confirming that plot failures were machine errors and not bugs! ### Changed - Sub blocks renamed to blocks, and blocks renamed to transaction blocks, everywhere. This effects the RPC, now -all fields that referred to sub blocks are changed to blocks. + all fields that referred to sub blocks are changed to blocks. - Base difficulty and weight have increased, so difficulty of "5" in the rc1 testnet will be equivalent to "21990232555520" in the previous testnet. - 'chia wallet send' now takes in TXCH or XCH as units instead of mojos. - Transactions have been further sped up. @@ -2671,7 +2710,7 @@ all fields that referred to sub blocks are changed to blocks. - A bug in bls-singatures/blspy could cause a stack overflow if too many signatures were verified at once. This caused the block of death at 11997 of the Beta 15 chain. Updated to 0.2.4 to address the issue. - GUI Wallet now correctly updates around reorgs. - chiapos 0.12.32 fixed a an out of bounds read that could crash the plotter. It also contains a fix to better handle the case of drive letters on Windows. -- Node would fail to start on Windows Server 2016 with lots of cores. This [python issue explains]( https://bugs.python.org/issue26903) the problem. +- Node would fail to start on Windows Server 2016 with lots of cores. This [python issue explains](https://bugs.python.org/issue26903) the problem. ### Known Issues @@ -2845,7 +2884,7 @@ all fields that referred to sub blocks are changed to blocks. - Handle disconnection and reconnection of hard drives properly. - Addressed pre-Haswell Windows signatures failing. - MacOS, Linux x64, and Linux aarch64 were not correctly compiling libsodium in -the blspy/bls-signatures library. + the blspy/bls-signatures library. - Removed outdated "200 plots" language from Plot tab. - Fixed spelling error for "folder" on Plot tab. - Various node dependency security vulnerabilities have been fixed. @@ -2861,76 +2900,76 @@ the blspy/bls-signatures library. 
### Added - We have released a new plot file format. We believe that plots made in this -format and with these IETF BLS keys will work without significant changes on -mainnet at launch. + format and with these IETF BLS keys will work without significant changes on + mainnet at launch. - We now use [chacha8](https://cr.yp.to/chacha.html) and -[blake3](https://github.com/BLAKE3-team/BLAKE3) for proof of space instead of -the now deprecated AES methods. This should increase plotting speed and support -more processors. + [blake3](https://github.com/BLAKE3-team/BLAKE3) for proof of space instead of + the now deprecated AES methods. This should increase plotting speed and support + more processors. - Plot refreshing happens during all new challenges and only new/modified files -are read. + are read. - Updated [blspy](https://github.com/Chia-Network/bls-signatures) to use the -new [IETF standard for BLS signatures](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02). + new [IETF standard for BLS signatures](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02). - Added a faster VDF process which generates n-wesolowski proofs quickly -after the VDF result is known. This requires a high number of CPUs. To use it, -set timelord.fast_algorithm = True in the config file. + after the VDF result is known. This requires a high number of CPUs. To use it, + set timelord.fast_algorithm = True in the config file. - Added a new type of timelord helper - blue boxes, which generate compact -proofs of time for existing proven blocks. This helps reducing the database -size and speeds up syncing a node for new users joining the network. Full nodes -send 100 random un-compact blocks per hour to blue boxes, and if -timelord.sanitizer_mode = True, the blue box timelord will work on those -challenges. Unlike the main timelord, average machines can run blue boxes -and contribute to the chain. Expect improvements to the install method for -blue boxes in future releases. + proofs of time for existing proven blocks. This helps reducing the database + size and speeds up syncing a node for new users joining the network. Full nodes + send 100 random un-compact blocks per hour to blue boxes, and if + timelord.sanitizer_mode = True, the blue box timelord will work on those + challenges. Unlike the main timelord, average machines can run blue boxes + and contribute to the chain. Expect improvements to the install method for + blue boxes in future releases. - From the UI you can add a directory that harvester will always check for -existing and new plots. Harvester will only look in the specific directory you -specify so you'll have to add any subfolders you want to also contain plots. + existing and new plots. Harvester will only look in the specific directory you + specify so you'll have to add any subfolders you want to also contain plots. - The UI now asks for confirmation before closing and shows shutdown progress. - UI now tries to shut down servers gracefully before exiting, and also closes -the daemon before starting. + the daemon before starting. - The various sub repositories (chiapos, chiavdf, etc.) now build ARM64 binary -wheels for Linux with Python 3.8. This makes installing on Ubuntu 20.04 lts on -a Raspberry Pi 3 or 4 easy. + wheels for Linux with Python 3.8. This makes installing on Ubuntu 20.04 lts on + a Raspberry Pi 3 or 4 easy. - Ci's check to see if they have secret access and attempt to fail cleanly so -that ci runs successfully complete from PRs or forked repositories. 
+ that ci runs successfully complete from PRs or forked repositories. - Farmer now sends challenges after a handshake with harvester. - The bls-signatures binary wheels include libsodium on all but Windows which -we expect to add in future releases. + we expect to add in future releases. - The chia executable is now available if installing from the Windows or MacOS -Graphical installer. Try `./chia -h` from -`~\AppData\Local\Chia-Blockchain\app-0.1.8\resources\app.asar.unpacked\daemon\` -in Windows or -`/Applications/Chia.app/Contents/Resources/app.asar.unpacked/daemon` on MacOS. + Graphical installer. Try `./chia -h` from + `~\AppData\Local\Chia-Blockchain\app-0.1.8\resources\app.asar.unpacked\daemon\` + in Windows or + `/Applications/Chia.app/Contents/Resources/app.asar.unpacked/daemon` on MacOS. ### Changed - Minor changes have been made across the repositories to better support -compiling on OpenBSD. HT @n1000. + compiling on OpenBSD. HT @n1000. - Changed XCH units to TXCH units for testnet. - A push to a branch will cancel all ci runs still running for that branch. - Ci's now cache pip and npm caches between runs. - Improve test speed with smaller discriminants, less blocks, less keys, and -smaller plots. + smaller plots. - RPC servers and clients were refactored. - The keychain no longer supports old keys that don't have mnemonics. - The keychain uses BIP39 for seed derivation, using the "" passphrase, and -also stores public keys. -- Plots.yaml has been replaced. Plot secret keys are stored in the plots, - and a list of directories that harvester can find plots in are in config.yaml. -You can move plots around to any directory in config.yaml as long as the farmer -has the correct farmer's secret key too. + also stores public keys. +- Plots.yaml has been replaced. Plot secret keys are stored in the plots, + and a list of directories that harvester can find plots in are in config.yaml. + You can move plots around to any directory in config.yaml as long as the farmer + has the correct farmer's secret key too. - Auto scanning of plot directories for .plot files. - The block header format was changed (puzzle hashes and pool signature). - Coinbase and fees coin are now in merkle set, and bip158 filter. - New harvester protocol with 2/2 harvester and farmer signatures, and modified -farmer and full node protocols. + farmer and full node protocols. - 255/256 filter which allows virtually unlimited plots per harvester or drive. - Improved create_plots and check_plots scripts, which are now -"chia plots create" and "chia plots check". + "chia plots create" and "chia plots check". - Add plot directories to config.yaml from the cli with "chia plots add". - Use real plot sizes in UI instead of a formula/ - HD keys now use EIP 2333 format instead of BIP32, for compatibility with -other chains. + other chains. - Keys are now derived with the EIP 2334 (m/12381/8444/a/b). - Removed the ability to pass in sk_seed to plotting, to increase security. - Linux builds of chiavdf and blspy now use a fresh build of gmp 6.2.1. @@ -2940,13 +2979,13 @@ other chains. - uPnP now works on Windows. - Log rotation should now properly rotate every 20MB and keep 7 historical logs. - Node had a significant memory leak under load due to an extraneous fork -in the network code. + in the network code. - Skylake processors on Windows without AVX would fail to run. - Harvester no longer runs into 512 maximum file handles open issue on Windows. 
- The version generator for new installers incorrectly handled the "dev" -versions after a release tag. + versions after a release tag. - Due to a python bug, ssl connections could randomly fail. Worked around -[Python issue 29288](https://bugs.python.org/issue29288) + [Python issue 29288](https://bugs.python.org/issue29288) - Removed websocket max message limit, allowing for more plots - Daemon was crashing when websocket gets improperly closed @@ -2954,14 +2993,14 @@ versions after a release tag. - All keys generated before Beta 1.8 are of an old format and no longer useful. - All plots generated before Beta 1.8 are no longer compatible with testnet and -should be deleted. + should be deleted. ### Known Issues - For Windows users on pre Haswell CPUs there is a known issue that causes -"Given G1 element failed g1_is_valid check" when attempting to generate -keys. This is a regression from our previous fix when it was upstreamed into -relic. We will make a patch available for these systems shortly. + "Given G1 element failed g1_is_valid check" when attempting to generate + keys. This is a regression from our previous fix when it was upstreamed into + relic. We will make a patch available for these systems shortly. ## [1.0beta7] aka Beta 1.7 - 2020-06-08 @@ -3158,7 +3197,7 @@ relic. We will make a patch available for these systems shortly. ### Added - There is now full transaction support on the Chia blockchain. In this initial Beta 1.0 release, all transaction types are supported though the wallets and UIs currently only directly support basic transactions like coinbase rewards and sending coins while paying fees. UI support for our [smart transactions](https://github.com/Chia-Network/wallets/blob/main/README.md) will be available in the UIs shortly. -- Wallet and Node GUI’s are available on Windows, Mac, and desktop Linux platforms. We now use an Electron UI that is a full light client wallet that can also serve as a node UI. Our Windows Electron Wallet can run standalone by connecting to other nodes on the network or another node you run. WSL 2 on Windows can run everything except the Wallet but you can run the Wallet on the native Windows side of the same machine. Also the WSL 2 install process is 3 times faster and *much* easier. Windows native node/farmer/plotting functionality are coming soon. +- Wallet and Node GUI’s are available on Windows, Mac, and desktop Linux platforms. We now use an Electron UI that is a full light client wallet that can also serve as a node UI. Our Windows Electron Wallet can run standalone by connecting to other nodes on the network or another node you run. WSL 2 on Windows can run everything except the Wallet but you can run the Wallet on the native Windows side of the same machine. Also the WSL 2 install process is 3 times faster and _much_ easier. Windows native node/farmer/plotting functionality are coming soon. - Install is significantly easier with less dependencies on all supported platforms. - If you’re a farmer you can use the Wallet to keep track of your earnings. Either use the same keys.yaml on the same machine or copy the keys.yaml to another machine where you want to track of and spend your coins. - We have continued to make improvements to the speed of VDF squaring, creating a VDF proof, and verifying a VDF proof. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 8998906234a1..59adbe7884bc 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -14,22 +14,22 @@ appearance, race, religion, or sexual identity and orientation. 
Examples of behavior that contributes to creating a positive environment include: -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Insulting/derogatory comments, and personal or political attacks, or excessive trolling. -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Insulting/derogatory comments, and personal or political attacks, or excessive trolling. +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting ## Our Responsibilities diff --git a/LEGACY-SUPPORT-POLICY.md b/LEGACY-SUPPORT-POLICY.md index fbe166a1d0ab..3050cecc3b67 100644 --- a/LEGACY-SUPPORT-POLICY.md +++ b/LEGACY-SUPPORT-POLICY.md @@ -1,6 +1,6 @@ # Legacy Software and Operating System Support Policy -It is the official policy of the Chia Blockchain project to end software support when the original maintainer of the software deems it End of Life (EOL) or stops providing support. The most relevant targets of this policy are Python, Node.js, and operating systems. +It is the official policy of the Chia Blockchain project to end software support when the original maintainer of the software deems it End of Life (EOL) or stops providing support. The most relevant targets of this policy are Python, Node.js, and operating systems. ## Long-term Support Conflicts diff --git a/PRETTY_GOOD_PRACTICES.md b/PRETTY_GOOD_PRACTICES.md index 7bc2a833692f..be0e92541560 100644 --- a/PRETTY_GOOD_PRACTICES.md +++ b/PRETTY_GOOD_PRACTICES.md @@ -115,7 +115,6 @@ def use_open_directly(path: Path): return file.read() ``` - ### Context managers for single use scenarios Even when no reuse is necessary there are still reasons to use context managers. @@ -146,13 +145,11 @@ def f(x, y, z): z.process(x, y) ``` - ### Examples - https://github.com/Chia-Network/chia-blockchain/pull/11467 - https://github.com/Chia-Network/chia-blockchain/pull/10166 - ## Classes There are a few basic goals for classes that are targeted by the guidance provided below. @@ -437,7 +434,6 @@ class ThreeIntAdder(Protocol): Another option for additional expressivity around hinting a callable with a protocol is to use overloads to narrow the possible combinations of calls. It is often better to just avoid overload situations, but as we retrofit hints to existing code we may prefer this option sometimes. - ### Type variables `TypeVar` allows you to create 'variables' for type hints. @@ -617,8 +613,8 @@ class SomeWallet: ## Tests -- Do not import `test_*` modules. 
Instead locate shared tooling in non-test files within the `tests/` directory or subdirectories. -- Do not import fixtures. Fixtures are shared by locating them in `conftest.py` files at whatever directory layer you want them to be recursively available from. +- Do not import `test_*` modules. Instead locate shared tooling in non-test files within the `tests/` directory or subdirectories. +- Do not import fixtures. Fixtures are shared by locating them in `conftest.py` files at whatever directory layer you want them to be recursively available from. - Do not use test classes. `unittest` requires that tests be held in a class. pytest does not. @@ -647,7 +643,7 @@ class SomeWallet: ## Idioms -- Avoid use of non-booleans as booleans such as `if the_list:`. If you mean `if len(the_list) > 0:` write that, if you mean `if an_optional_thing is not None:` write that. +- Avoid use of non-booleans as booleans such as `if the_list:`. If you mean `if len(the_list) > 0:` write that, if you mean `if an_optional_thing is not None:` write that. ## Exceptions @@ -665,7 +661,6 @@ You want the code to fail quickly and clearly with a `NameError`, not silently c This is why linters discourage bare `except:` and overly broad `except Exception:` clauses. Especially don't `except BaseException:` as that can consume even shutdown requests. - ```python from datetime import datetime diff --git a/README.md b/README.md index 5b2cb4baee2a..bcbec273f809 100644 --- a/README.md +++ b/README.md @@ -2,20 +2,21 @@ [![Chia Network logo][logo-chia]][link-chia] -| Releases | Repo Stats | Socials | -| -------- | ---------- | ------- | -[![Latest Release][badge-release]][link-latest]
[![Latest RC][badge-rc]][link-release]
[![Latest Beta][badge-beta]][link-release] | [![Coverage][badge-coverage]][link-coverage]
[![Downloads][badge-downloads]][link-downloads]
[![Commits][badge-commits]][link-commits]
[![Contributers][badge-contributers]][link-contributers] | [![Discord][badge-discord]][link-discord]
[![YouTube][badge-youtube]][link-youtube]
[![Reddit][badge-reddit]][link-reddit]
[![Twitter][badge-twitter]][link-twitter] +| Releases | Repo Stats | Socials | +| ----------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| [![Latest Release][badge-release]][link-latest]
[![Latest RC][badge-rc]][link-release]
[![Latest Beta][badge-beta]][link-release] | [![Coverage][badge-coverage]][link-coverage]
[![Downloads][badge-downloads]][link-downloads]
[![Commits][badge-commits]][link-commits]
[![Contributors][badge-contributers]][link-contributers] | [![Discord][badge-discord]][link-discord]
[![YouTube][badge-youtube]][link-youtube]
[![Reddit][badge-reddit]][link-reddit]
[![Twitter][badge-twitter]][link-twitter] | Chia is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure. Here are some of the features and benefits: -* [Proof of space and time][link-consensus] based consensus which allows anyone to farm with commodity hardware -* Very easy to use full node and farmer GUI and cli (thousands of nodes active on mainnet) -* [Chia seeder][link-seeder], which maintains a list of reliable nodes within the Chia network via a built-in DNS server. -* Simplified UTXO based transaction model, with small on-chain state -* Lisp-style Turing-complete functional [programming language][link-chialisp] for money related use cases -* BLS keys and aggregate signatures (only one signature per block) -* [Pooling protocol][link-pool] that allows farmers to have control of making blocks -* Support for light clients with fast, objective syncing -* A growing community of farmers and developers around the world + +- [Proof of space and time][link-consensus] based consensus which allows anyone to farm with commodity hardware +- Very easy to use full node and farmer GUI and cli (thousands of nodes active on mainnet) +- [Chia seeder][link-seeder], which maintains a list of reliable nodes within the Chia network via a built-in DNS server. +- Simplified UTXO based transaction model, with small on-chain state +- Lisp-style Turing-complete functional [programming language][link-chialisp] for money related use cases +- BLS keys and aggregate signatures (only one signature per block) +- [Pooling protocol][link-pool] that allows farmers to have control of making blocks +- Support for light clients with fast, objective syncing +- A growing community of farmers and developers around the world Please check out the [Chia website][link-chia], the [Intro to Chia][link-intro], and [FAQ][link-faq] for information on this project. @@ -38,39 +39,37 @@ Install instructions are available in the [Installation Details][link-install] s Once installed, an [Intro to Chia][link-intro] guide is available in the [Chia Docs][link-docs]. 
-[badge-beta]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-beta.json&query=%24.message&logo=chianetwork&logoColor=black&label=Latest%20Beta&labelColor=%23e9fbbc&color=%231e2b2e -[badge-beta2]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-beta.json&query=%24.message&logo=chianetwork&logoColor=%23e9fbbc&label=Latest%20Beta&labelColor=%23474748&color=%231e2b2e&link=https%3A%2F%2Fgithub.com%2FChia-Network%2Fchia-blockchain%2Freleases&link=https%3A%2F%2Fgithub.com%2FChia-Network%2Fchia-blockchain%2Freleases -[badge-commits]: https://img.shields.io/github/commit-activity/w/Chia-Network/chia-blockchain?logo=GitHub +[badge-beta]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-beta.json&query=%24.message&logo=chianetwork&logoColor=black&label=Latest%20Beta&labelColor=%23e9fbbc&color=%231e2b2e +[badge-beta2]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-beta.json&query=%24.message&logo=chianetwork&logoColor=%23e9fbbc&label=Latest%20Beta&labelColor=%23474748&color=%231e2b2e&link=https%3A%2F%2Fgithub.com%2FChia-Network%2Fchia-blockchain%2Freleases&link=https%3A%2F%2Fgithub.com%2FChia-Network%2Fchia-blockchain%2Freleases +[badge-commits]: https://img.shields.io/github/commit-activity/w/Chia-Network/chia-blockchain?logo=GitHub [badge-contributers]: https://img.shields.io/github/contributors/Chia-Network/chia-blockchain?logo=GitHub -[badge-coverage]: https://img.shields.io/coverallsCoverage/github/Chia-Network/chia-blockchain?logo=Coveralls&logoColor=red&labelColor=%23212F39 -[badge-discord]: https://dcbadge.vercel.app/api/server/chia?style=flat-square&theme=full-presence -[badge-discord2]: https://img.shields.io/discord/1034523881404370984.svg?label=Discord&logo=discord&colorB=1e2b2f -[badge-downloads]: https://img.shields.io/github/downloads/Chia-Network/chia-blockchain/total?logo=GitHub -[badge-rc]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-rc.json&query=%24.message&logo=chianetwork&logoColor=white&label=Latest%20RC&labelColor=%230d3349&color=%23474748 -[badge-reddit]: https://img.shields.io/reddit/subreddit-subscribers/chia?style=flat-square&logo=reddit&labelColor=%230b1416&color=%23222222 -[badge-release]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data.json&query=%24.message&logo=chianetwork&label=Latest%20Release&labelColor=%231e2b2e&color=%230d3349 -[badge-twitter]: https://img.shields.io/twitter/follow/chia_project?style=flat-square&logo=x.org&logoColor=white&labelColor=black -[badge-youtube]: https://img.shields.io/youtube/channel/subscribers/UChFkJ3OAUvnHZdiQISWdWPA?style=flat-square&logo=youtube&logoColor=%23ff0000&labelColor=%230f0f0f&color=%23272727 - -[link-chia]: https://www.chia.net/ -[link-chialisp]: https://chialisp.com/ -[link-commits]: https://github.com/Chia-Network/chia-blockchain/commits/main/ -[link-consensus]: https://docs.chia.net/consensus-intro/ -[link-contributers]: https://github.com/Chia-Network/chia-blockchain/graphs/contributors -[link-coverage]: https://coveralls.io/github/Chia-Network/chia-blockchain -[link-discord]: https://discord.gg/chia -[link-docs]: https://docs.chia.net/docs-home/ -[link-downloads]: https://www.chia.net/downloads/ -[link-faq]: https://docs.chia.net/faq/ -[link-install]: https://docs.chia.net/installation/ -[link-intro]: 
https://docs.chia.net/introduction/ -[link-latest]: https://github.com/Chia-Network/chia-blockchain/releases/latest -[link-pool]: https://docs.chia.net/pool-farming/ -[link-reddit]: https://www.reddit.com/r/chia/ -[link-release]: https://github.com/Chia-Network/chia-blockchain/releases -[link-seeder]: https://docs.chia.net/guides/seeder-user-guide/ -[link-twitter]: https://twitter.com/chia_project -[link-upnp]: https://www.homenethowto.com/ports-and-nat/upnp-automatic-port-forward/ -[link-youtube]: https://www.youtube.com/chianetwork - -[logo-chia]: https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg "Chia logo" +[badge-coverage]: https://img.shields.io/coverallsCoverage/github/Chia-Network/chia-blockchain?logo=Coveralls&logoColor=red&labelColor=%23212F39 +[badge-discord]: https://dcbadge.vercel.app/api/server/chia?style=flat-square&theme=full-presence +[badge-discord2]: https://img.shields.io/discord/1034523881404370984.svg?label=Discord&logo=discord&colorB=1e2b2f +[badge-downloads]: https://img.shields.io/github/downloads/Chia-Network/chia-blockchain/total?logo=GitHub +[badge-rc]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-rc.json&query=%24.message&logo=chianetwork&logoColor=white&label=Latest%20RC&labelColor=%230d3349&color=%23474748 +[badge-reddit]: https://img.shields.io/reddit/subreddit-subscribers/chia?style=flat-square&logo=reddit&labelColor=%230b1416&color=%23222222 +[badge-release]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data.json&query=%24.message&logo=chianetwork&label=Latest%20Release&labelColor=%231e2b2e&color=%230d3349 +[badge-twitter]: https://img.shields.io/twitter/follow/chia_project?style=flat-square&logo=x.org&logoColor=white&labelColor=black +[badge-youtube]: https://img.shields.io/youtube/channel/subscribers/UChFkJ3OAUvnHZdiQISWdWPA?style=flat-square&logo=youtube&logoColor=%23ff0000&labelColor=%230f0f0f&color=%23272727 +[link-chia]: https://www.chia.net/ +[link-chialisp]: https://chialisp.com/ +[link-commits]: https://github.com/Chia-Network/chia-blockchain/commits/main/ +[link-consensus]: https://docs.chia.net/consensus-intro/ +[link-contributers]: https://github.com/Chia-Network/chia-blockchain/graphs/contributors +[link-coverage]: https://coveralls.io/github/Chia-Network/chia-blockchain +[link-discord]: https://discord.gg/chia +[link-docs]: https://docs.chia.net/docs-home/ +[link-downloads]: https://www.chia.net/downloads/ +[link-faq]: https://docs.chia.net/faq/ +[link-install]: https://docs.chia.net/installation/ +[link-intro]: https://docs.chia.net/introduction/ +[link-latest]: https://github.com/Chia-Network/chia-blockchain/releases/latest +[link-pool]: https://docs.chia.net/pool-farming/ +[link-reddit]: https://www.reddit.com/r/chia/ +[link-release]: https://github.com/Chia-Network/chia-blockchain/releases +[link-seeder]: https://docs.chia.net/guides/seeder-user-guide/ +[link-twitter]: https://twitter.com/chia_project +[link-upnp]: https://www.homenethowto.com/ports-and-nat/upnp-automatic-port-forward/ +[link-youtube]: https://www.youtube.com/chianetwork +[logo-chia]: https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg "Chia logo" From 2dbd5b3b6774b2e778c15b90491839b4a02fe118 Mon Sep 17 00:00:00 2001 From: peicuiping <168072318+peicuiping@users.noreply.github.com> Date: Tue, 16 Jul 2024 03:36:59 +0800 Subject: [PATCH 70/77] chore: remove repetitive words (#17996) --- chia/_tests/plot_sync/test_receiver.py | 2 +- 
chia/_tests/wallet/dao_wallet/test_dao_clvm.py | 2 +- chia/cmds/init.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/_tests/plot_sync/test_receiver.py b/chia/_tests/plot_sync/test_receiver.py index f5602067550d..7920d128824e 100644 --- a/chia/_tests/plot_sync/test_receiver.py +++ b/chia/_tests/plot_sync/test_receiver.py @@ -131,7 +131,7 @@ def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str] async def run_sync_step(receiver: Receiver, sync_step: SyncStepData) -> None: assert receiver.current_sync().state == sync_step.state last_sync_time_before = receiver._last_sync.time_done - # For the the list types invoke the trigger function in batches + # For the list types invoke the trigger function in batches if sync_step.payload_type == PlotSyncPlotList or sync_step.payload_type == PlotSyncPathList: step_data, _ = sync_step.args assert len(step_data) == 10 diff --git a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py index 7840d143681c..a9b46f6da798 100644 --- a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py +++ b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py @@ -98,7 +98,7 @@ def test_proposal() -> None: ) self_destruct_time = 1000 # number of blocks oracle_spend_delay = 10 - active_votes_list = [0xFADEDDAB] # are the the ids of previously voted on proposals? + active_votes_list = [0xFADEDDAB] # are the ids of previously voted on proposals? acs: Program = Program.to(1) acs_ph: bytes32 = acs.get_tree_hash() diff --git a/chia/cmds/init.py b/chia/cmds/init.py index 0c7baf3d4327..ce53a197d4dc 100644 --- a/chia/cmds/init.py +++ b/chia/cmds/init.py @@ -40,7 +40,7 @@ def init_cmd( - Make a copy of your Farming Machine CA directory: ~/.chia/[version]/config/ssl/ca - Shut down all chia daemon processes with `chia stop all -d` - Run `chia init -c [directory]` on your remote harvester, - where [directory] is the the copy of your Farming Machine CA directory + where [directory] is the copy of your Farming Machine CA directory - Get more details on remote harvester on Chia wiki: https://github.com/Chia-Network/chia-blockchain/wiki/Farming-on-many-machines """ From d5c1bb9ede9261df022158aeaf24abd09b946a6e Mon Sep 17 00:00:00 2001 From: "SpaceFarmers.io" <97022238+spacefarmers@users.noreply.github.com> Date: Mon, 15 Jul 2024 21:37:31 +0200 Subject: [PATCH 71/77] Trusted wallet peer enhancements (#17872) * Show trusted (wallet) peers * Warn when the subscription limit has been reached * Revert: (39022fe) Warn when the subscription limit has been reached --- chia/cmds/peer_funcs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/chia/cmds/peer_funcs.py b/chia/cmds/peer_funcs.py index cf8e3877f7a6..b678a661e037 100644 --- a/chia/cmds/peer_funcs.py +++ b/chia/cmds/peer_funcs.py @@ -102,6 +102,8 @@ async def print_connections(rpc_client: RpcClient, trusted_peers: Dict[str, Any] f"{last_connect} " f"{mb_up:7.1f}|{mb_down:<7.1f}" ) + if trusted: + con_str += f" -Trusted: {trusted}" print(con_str) From f438b4fee1efc4d241bdca879a5e8bf5ed7e6c73 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 15 Jul 2024 16:34:26 -0400 Subject: [PATCH 72/77] prettier again (#18307) --- CHANGELOG.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d54cc06f0b05..bc8daa396953 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,12 +7,16 @@ and this project does not yet adhere to [Semantic Versioning](https://semver.org for setuptools_scm/PEP 440 reasons. 
## 2.4.2 Chia blockchain 2024-07-10 + ## What's Changed + ### Fixed -* Fix 12-word mnemonic support in keychain (Fixes #18243) -* Fix backwards compatibility for daemon RPC `add_private_key` + +- Fix 12-word mnemonic support in keychain (Fixes #18243) +- Fix backwards compatibility for daemon RPC `add_private_key` ### Deprecated + macOS 12 (Monterey) is deprecated. This release (2.4.2) will be the last release to support macOS 12 ## 2.4.1 Chia blockchain 2024-06-25 From 0b6360f6014d58098015bf4931e0af3c037f3c70 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 15 Jul 2024 16:35:49 -0400 Subject: [PATCH 73/77] refactor installer publish jobs (#18220) * first pass * correct linux * some macos * windows * rpm * tidy * remove extension from variable names * some * yep, we use it * matchy matchy * fixup * fixup * almost the same * fixup * fixup * fixup * maybe * fixup * INSTALL_PATH * fixup * fixup * file-arch-name * fixup * fixup * fixup * windows diagnostics * fixup * fixup * windows diagnostics * fixup * fixup * absolute * one more time (until the next time) * more * yep * maybe * perhaps * possibly * tidy * tidy * more generic * correct endpoints for gui vs. cli glue calls --- .../workflows/build-linux-installer-deb.yml | 143 +++++++++-------- .../workflows/build-linux-installer-rpm.yml | 137 +++++++++------- .github/workflows/build-macos-installers.yml | 136 +++++++++------- .github/workflows/build-windows-installer.yml | 147 ++++++++++++------ .github/workflows/reflow-version.yml | 8 +- build_scripts/build_macos-2-installer.sh | 10 +- 6 files changed, 359 insertions(+), 222 deletions(-) diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 8d65b6df9019..118c94a7c2ea 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -57,10 +57,12 @@ jobs: arch: amd64 madmax-suffix: "x86-64" bladebit-suffix: "ubuntu-x86-64.tar.gz" + arch-artifact-name: intel - runs-on: [Linux, ARM64] arch: arm64 madmax-suffix: "arm64" bladebit-suffix: "ubuntu-arm64.tar.gz" + arch-artifact-name: arm env: CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} @@ -140,7 +142,7 @@ jobs: with: python-version: ${{ matrix.python-version }} development: true - constraints-file-artifact-name: constraints-file-${{ matrix.os.arch }} + constraints-file-artifact-name: constraints-file-${{ matrix.os.arch-artifact-name }} - uses: chia-network/actions/activate-venv@main @@ -176,34 +178,72 @@ jobs: - name: Upload Linux artifacts uses: actions/upload-artifact@v4 with: - name: chia-installers-linux-deb-${{ matrix.os.arch }} - path: ${{ github.workspace }}/build_scripts/final_installer/ + name: chia-installers-linux-deb-${{ matrix.os.arch-artifact-name }} + path: build_scripts/final_installer/ - name: Remove working files to exclude from cache run: | rm -rf ./chia-blockchain-gui/packages/gui/daemon publish: - name: Publish ${{ matrix.os.arch }} + name: Publish ${{ matrix.arch.name }} ${{ matrix.mode.name }} ${{ matrix.os.file-type.name }} runs-on: ubuntu-latest needs: - version - build - timeout-minutes: ${{ matrix.os.timeout }} + timeout-minutes: 5 strategy: fail-fast: false matrix: python-version: ["3.10"] os: - - arch: amd64 - glue-name: "build-amd64-deb" - timeout: 5 - - arch: arm64 - glue-name: "build-arm64-deb" - timeout: 5 + - matrix: debian + file-type: + name: DEB + extension: deb + glue-name: deb + artifact-platform-name: linux + file-arch-name: + arm: arm64 + intel: amd64 + 
file-suffix: + arm: "" + intel: "" + names: + cli: + file: chia-blockchain-cli_{0}-1_{2}.deb + dev-file: chia-blockchain-cli_{1}-1_{2}.deb + latest-dev-file: chia-blockchain-cli_{2}_latest_dev.deb + gui: + file: chia-blockchain_{0}_{2}.deb + dev-file: chia-blockchain_{1}_{2}.deb + latest-dev-file: chia-blockchain_{2}_latest_dev.deb + mode: + - name: GUI + matrix: gui + glue-name: gui + - name: CLI + matrix: cli + glue-name: cli + arch: + - name: ARM64 + matrix: arm + artifact-name: arm + glue-name: arm + - name: Intel + matrix: intel + artifact-name: intel + glue-name: intel env: - CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} + FILE: ${{ format(matrix.os.names[matrix.mode.matrix].file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + LATEST_DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].latest-dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + INSTALL_S3_URL: s3://download.chia.net/install/ + DEV_S3_URL: s3://download.chia.net/dev/ + LATEST_DEV_S3_URL: s3://download.chia.net/latest-dev/ + TORRENT_S3_URL: s3://download.chia.net/torrents/ + TRACKER_URL: udp://tracker.opentrackr.org:1337/announce steps: - uses: Chia-Network/actions/clean-workspace@main @@ -222,7 +262,7 @@ jobs: - name: Download constraints file uses: actions/download-artifact@v4 with: - name: constraints-file-${{ matrix.os.arch }} + name: constraints-file-${{ matrix.arch.artifact-name }} path: venv - name: Install utilities @@ -232,8 +272,8 @@ jobs: - name: Download packages uses: actions/download-artifact@v4 with: - name: chia-installers-linux-deb-${{ matrix.os.arch }} - path: build_scripts/final_installer/ + name: chia-installers-${{ matrix.os.artifact-platform-name }}-${{ matrix.os.file-type.extension }}-${{ matrix.arch.artifact-name }} + path: artifacts/ - name: Set Env uses: Chia-Network/actions/setjobenv@main @@ -242,7 +282,6 @@ jobs: - name: Test for secrets access id: check_secrets - shell: bash run: | unset HAS_AWS_SECRET unset HAS_GLUE_SECRET @@ -263,48 +302,38 @@ jobs: role-to-assume: arn:aws:iam::${{ secrets.CHIA_AWS_ACCOUNT_ID }}:role/installer-upload aws-region: us-west-2 - - name: Upload to s3 - if: steps.check_secrets.outputs.HAS_AWS_SECRET + - name: Create Checksums run: | - GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8) - CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH - echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >> "$GITHUB_ENV" - aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb" "s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_${{ matrix.os.arch }}.deb" - aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb" "s3://download.chia.net/dev/chia-blockchain-cli_${CHIA_DEV_BUILD}-1_${{ matrix.os.arch }}.deb" + ls artifacts/ + sha256sum "artifacts/${FILE}" > "artifacts/${FILE}.sha256" - - name: Create Checksums + - name: Upload to s3 + if: steps.check_secrets.outputs.HAS_AWS_SECRET run: | - 
ls "$GITHUB_WORKSPACE"/build_scripts/final_installer/ - sha256sum "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb > "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.sha256 - sha256sum "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb > "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb.sha256 - ls "$GITHUB_WORKSPACE"/build_scripts/final_installer/ + ls artifacts/ + aws s3 cp "artifacts/${FILE}" "${DEV_S3_URL}/${DEV_FILE}" + aws s3 cp "artifacts/${FILE}.sha256" "${LATEST_DEV_S3_URL}/${DEV_FILE}.sha256" - - name: Create .deb torrent - if: env.FULL_RELEASE == 'true' + - name: Create torrent + if: env.FULL_RELEASE == 'true' && matrix.mode.matrix == 'gui' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb -o "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.torrent --webseed https://download.chia.net/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb -o "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb.torrent --webseed https://download.chia.net/install/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb - gh release upload --repo ${{ github.repository }} $RELEASE_TAG "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.torrent + py3createtorrent -f -t ${TRACKER_URL} artifacts/${FILE} -o artifacts/${FILE}.torrent --webseed https://download.chia.net/install/${FILE} + gh release upload --repo ${{ github.repository }} $RELEASE_TAG artifacts/${FILE}.torrent - name: Upload Dev Installer if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main' run: | - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb s3://download.chia.net/latest-dev/chia-blockchain_${{ matrix.os.arch }}_latest_dev.deb - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.sha256 s3://download.chia.net/latest-dev/chia-blockchain_${{ matrix.os.arch }}_latest_dev.deb.sha256 - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb s3://download.chia.net/latest-dev/chia-blockchain-cli_${{ matrix.os.arch }}_latest_dev.deb - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb.sha256 s3://download.chia.net/latest-dev/chia-blockchain-cli_${{ matrix.os.arch }}_latest_dev.deb.sha256 + aws s3 cp artifacts/${FILE} ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE} + aws s3 cp artifacts/${FILE}.sha256 ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE}.sha256 - name: Upload Release Files if: 
steps.check_secrets.outputs.HAS_AWS_SECRET && env.FULL_RELEASE == 'true' run: | - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.sha256 s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb.torrent s3://download.chia.net/torrents/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb.sha256 s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb.torrent s3://download.chia.net/torrents/ + aws s3 cp artifacts/${FILE} ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.sha256 ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.torrent ${TORRENT_S3_URL} - name: Upload release artifacts if: env.RELEASE == 'true' @@ -314,26 +343,18 @@ jobs: gh release upload \ --repo ${{ github.repository }} \ $RELEASE_TAG \ - build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_${{ matrix.os.arch }}.deb \ - build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${{ matrix.os.arch }}.deb + artifacts/${FILE} - - name: Mark pre-release installer complete + - name: Mark installer complete uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - with: - json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' - glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" - glue_path: "success/${{ matrix.os.glue-name }}" - - - name: Mark release installer complete - uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' + if: steps.check_secrets.outputs.HAS_GLUE_SECRET && (env.PRE_RELEASE == 'true' || env.FULL_RELEASE == 'true') + env: + REPO_SUFFIX: ${{ env.PRE_RELEASE == 'true' && '-prerelease' || '' }} with: json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" - glue_path: "success/${{ matrix.os.glue-name }}" + glue_project: "${{ env.RFC_REPO }}${{ env.REPO_SUFFIX }}/${{ env.RELEASE_TAG }}" + glue_path: "success/build-${{ matrix.os.glue-name }}-${{ matrix.arch.glue-name }}-${{ matrix.mode.glue-name }}" test: name: Test ${{ matrix.distribution.name }} ${{ matrix.mode.name }} ${{ matrix.arch.name }} @@ -380,10 +401,10 @@ jobs: arch: - name: ARM64 matrix: arm - artifact-name: arm64 + artifact-name: arm - name: Intel matrix: intel - artifact-name: amd64 + artifact-name: intel env: DEBIAN_FRONTEND: noninteractive diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index c1f401115f23..728ef761e9ae 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -53,6 +53,8 @@ jobs: fail-fast: false matrix: python-version: ["3.10"] + os: + - arch-artifact-name: intel env: CHIA_INSTALLER_VERSION: ${{ 
needs.version.outputs.chia-installer-version }} @@ -130,7 +132,7 @@ jobs: with: python-version: ${{ matrix.python-version }} development: true - constraints-file-artifact-name: constraints-file-intel + constraints-file-artifact-name: constraints-file-${{ matrix.os.arch-artifact-name }} - uses: chia-network/actions/activate-venv@main @@ -175,15 +177,15 @@ jobs: - name: Upload Linux artifacts uses: actions/upload-artifact@v4 with: - name: chia-installers-linux-rpm-intel - path: ${{ github.workspace }}/build_scripts/final_installer/ + name: chia-installers-linux-rpm-${{ matrix.os.arch-artifact-name }} + path: build_scripts/final_installer/ - name: Remove working files to exclude from cache run: | rm -rf ./chia-blockchain-gui/packages/gui/daemon publish: - name: Publish amd64 RPM + name: Publish ${{ matrix.arch.name }} ${{ matrix.mode.name }} ${{ matrix.os.file-type.name }} runs-on: ubuntu-latest needs: - version @@ -193,9 +195,58 @@ jobs: fail-fast: false matrix: python-version: ["3.10"] + os: + - matrix: redhat + file-type: + name: RPM + extension: rpm + glue-name: rpm + artifact-platform-name: linux + file-arch-name: + intel: x86_64 + file-suffix: + arm: "" + intel: "" + names: + cli: + file: chia-blockchain-cli-{0}-1.{2}.rpm + def-file: chia-blockchain-cli-{1}-1.{2}.rpm + latest-dev-file: chia-blockchain-cli-1.{2}_latest_dev.rpm + gui: + file: chia-blockchain-{0}-1.{2}.rpm + def-file: chia-blockchain-{1}-1.{2}.rpm + latest-dev-file: chia-blockchain-1.{2}_latest_dev.rpm + mode: + - name: GUI + matrix: gui + glue-name: gui + - name: CLI + matrix: cli + glue-name: cli + arch: + - name: ARM64 + matrix: arm + artifact-name: arm + glue-name: arm + - name: Intel + matrix: intel + artifact-name: intel + glue-name: intel + exclude: + - os: + matrix: redhat + arch: + matrix: arm env: - CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} + FILE: ${{ format(matrix.os.names[matrix.mode.matrix].file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + LATEST_DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].latest-dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + INSTALL_S3_URL: s3://download.chia.net/install/ + DEV_S3_URL: s3://download.chia.net/dev/ + LATEST_DEV_S3_URL: s3://download.chia.net/latest-dev/ + TORRENT_S3_URL: s3://download.chia.net/torrents/ + TRACKER_URL: udp://tracker.opentrackr.org:1337/announce steps: - uses: Chia-Network/actions/clean-workspace@main @@ -214,7 +265,7 @@ jobs: - name: Download constraints file uses: actions/download-artifact@v4 with: - name: constraints-file-intel + name: constraints-file-${{ matrix.arch.artifact-name }} path: venv - name: Install utilities @@ -224,8 +275,8 @@ jobs: - name: Download packages uses: actions/download-artifact@v4 with: - name: chia-installers-linux-rpm-intel - path: build_scripts/final_installer/ + name: chia-installers-${{ matrix.os.artifact-platform-name }}-${{ matrix.os.file-type.extension }}-${{ matrix.arch.artifact-name }} + path: artifacts/ - name: Set Env uses: Chia-Network/actions/setjobenv@main 
@@ -234,7 +285,6 @@ jobs: - name: Test for secrets access id: check_secrets - shell: bash run: | unset HAS_AWS_SECRET unset HAS_GLUE_SECRET @@ -255,49 +305,38 @@ jobs: role-to-assume: arn:aws:iam::${{ secrets.CHIA_AWS_ACCOUNT_ID }}:role/installer-upload aws-region: us-west-2 - - name: Upload to s3 - if: steps.check_secrets.outputs.HAS_AWS_SECRET + - name: Create Checksums run: | - GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8) - CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH - echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >> "$GITHUB_ENV" - ls "$GITHUB_WORKSPACE"/build_scripts/final_installer/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/dev/chia-blockchain-${CHIA_DEV_BUILD}-1.x86_64.rpm - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/dev/chia-blockchain-cli-${CHIA_DEV_BUILD}-1.x86_64.rpm + ls artifacts/ + sha256sum "artifacts/${FILE}" > "artifacts/${FILE}.sha256" - - name: Create Checksums + - name: Upload to s3 + if: steps.check_secrets.outputs.HAS_AWS_SECRET run: | - ls "$GITHUB_WORKSPACE"/build_scripts/final_installer/ - sha256sum "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm > "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 - sha256sum "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm > "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 - ls "$GITHUB_WORKSPACE"/build_scripts/final_installer/ + ls artifacts/ + aws s3 cp "artifacts/${FILE}" "${DEV_S3_URL}/${DEV_FILE}" + aws s3 cp "artifacts/${FILE}.sha256" "${LATEST_DEV_S3_URL}/${DEV_FILE}.sha256" - - name: Create .rpm torrent - if: env.FULL_RELEASE == 'true' + - name: Create torrent + if: env.FULL_RELEASE == 'true' && matrix.mode.matrix == 'gui' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm - gh release upload --repo ${{ github.repository }} $RELEASE_TAG "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent + py3createtorrent -f -t ${TRACKER_URL} artifacts/${FILE} -o artifacts/${FILE}.torrent --webseed https://download.chia.net/install/${FILE} + gh release upload --repo ${{ github.repository }} $RELEASE_TAG artifacts/${FILE}.torrent - name: Upload Dev Installer if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main' run: | - aws s3 cp 
"$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/latest-dev/chia-blockchain-1.x86_64_latest_dev.rpm - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/latest-dev/chia-blockchain-1.x86_64_latest_dev.rpm.sha256 - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/latest-dev/chia-blockchain-cli-1.x86_64_latest_dev.rpm - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/latest-dev/chia-blockchain-cli-1.x86_64_latest_dev.rpm.sha256 + aws s3 cp artifacts/${FILE} ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE} + aws s3 cp artifacts/${FILE}.sha256 ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE}.sha256 - name: Upload Release Files if: steps.check_secrets.outputs.HAS_AWS_SECRET && env.FULL_RELEASE == 'true' run: | - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/ - aws s3 cp "$GITHUB_WORKSPACE"/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/ + aws s3 cp artifacts/${FILE} ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.sha256 ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.torrent ${TORRENT_S3_URL} - name: Upload release artifacts if: env.RELEASE == 'true' @@ -307,26 +346,18 @@ jobs: gh release upload \ --repo ${{ github.repository }} \ $RELEASE_TAG \ - build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm \ - build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm + artifacts/${FILE} - - name: Mark pre-release installer complete + - name: Mark installer complete uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - with: - json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' - glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" - glue_path: "success/build-linux-rpm" - - - name: Mark release installer complete - uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' + if: steps.check_secrets.outputs.HAS_GLUE_SECRET && (env.PRE_RELEASE == 'true' || env.FULL_RELEASE == 'true') + env: + REPO_SUFFIX: ${{ env.PRE_RELEASE == 'true' && '-prerelease' || '' }} with: json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" - glue_path: "success/build-linux-rpm" + glue_project: "${{ env.RFC_REPO }}${{ env.REPO_SUFFIX }}/${{ 
env.RELEASE_TAG }}" + glue_path: "success/build-${{ matrix.os.glue-name }}-${{ matrix.arch.glue-name }}-${{ matrix.mode.glue-name }}" test: name: Test ${{ matrix.distribution.name }} ${{ matrix.mode.name }} ${{ matrix.state.name }} diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index b18e9f473b1d..45d1b25f727a 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -55,9 +55,11 @@ jobs: - runs-on: macos-12 name: intel bladebit-suffix: macos-x86-64.tar.gz + arch-artifact-name: intel - runs-on: macos-13-arm64 name: m1 bladebit-suffix: macos-arm64.tar.gz + arch-artifact-name: arm env: CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} @@ -169,7 +171,7 @@ jobs: with: python-version: ${{ matrix.python-version }} development: true - constraints-file-artifact-name: constraints-file-${{ matrix.os.name }} + constraints-file-artifact-name: constraints-file-${{ matrix.os.arch-artifact-name }} - uses: chia-network/actions/activate-venv@main @@ -216,8 +218,8 @@ jobs: - name: Upload MacOS artifacts uses: actions/upload-artifact@v4 with: - name: chia-installers-macos-dmg-${{ matrix.os.name }} - path: ${{ github.workspace }}/build_scripts/final_installer/ + name: chia-installers-macos-dmg-${{ matrix.os.arch-artifact-name }} + path: build_scripts/final_installer/ - name: Remove working files to exclude from cache run: | @@ -229,7 +231,7 @@ jobs: run: security delete-keychain signing_temp.keychain || true publish: - name: Publish ${{ matrix.os.name }} DMG + name: Publish ${{ matrix.arch.name }} ${{ matrix.os.file-type.name }} runs-on: ubuntu-latest needs: - version @@ -240,21 +242,58 @@ jobs: matrix: python-version: ["3.10"] os: - - name: intel - file-suffix: "" - glue-name: "build-macos" - - name: m1 - file-suffix: "-arm64" - glue-name: "build-mac-m1" - + - matrix: macos + file-type: + name: DMG + extension: dmg + glue-name: macos + artifact-platform-name: macos + file-arch-name: + arm: m1 + intel: intel + file-suffix: + arm: "-arm64" + intel: "" + names: + gui: + file: Chia-{0}{3}.dmg + dev-file: Chia-{1}{3}.dmg + latest-dev-file: Chia-{3}_latest_dev.dmg + mode: + - name: GUI + matrix: gui + glue-name: gui + - name: CLI + matrix: cli + glue-name: cli + arch: + - name: ARM64 + matrix: arm + artifact-name: arm + glue-name: arm + - name: Intel + matrix: intel + artifact-name: intel + glue-name: intel + exclude: + - os: + matrix: macos + mode: + matrix: cli env: - CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} + FILE: ${{ format(matrix.os.names[matrix.mode.matrix].file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + LATEST_DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].latest-dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + INSTALL_S3_URL: s3://download.chia.net/install/ + DEV_S3_URL: s3://download.chia.net/dev/ + LATEST_DEV_S3_URL: s3://download.chia.net/latest-dev/ + TORRENT_S3_URL: s3://download.chia.net/torrents/ + 
TRACKER_URL: udp://tracker.opentrackr.org:1337/announce steps: - uses: Chia-Network/actions/clean-workspace@main - - name: Setup Python environment - uses: Chia-Network/actions/setup-python@main + - uses: Chia-Network/actions/setup-python@main with: python-version: ${{ matrix.python-version }} @@ -268,7 +307,7 @@ jobs: - name: Download constraints file uses: actions/download-artifact@v4 with: - name: constraints-file-${{ matrix.os.name }} + name: constraints-file-${{ matrix.arch.artifact-name }} path: venv - name: Install utilities @@ -278,8 +317,8 @@ jobs: - name: Download packages uses: actions/download-artifact@v4 with: - name: chia-installers-macos-dmg-${{ matrix.os.name }} - path: build_scripts/final_installer/ + name: chia-installers-${{ matrix.os.artifact-platform-name }}-${{ matrix.os.file-type.extension }}-${{ matrix.arch.artifact-name }} + path: artifacts/ - name: Set Env uses: Chia-Network/actions/setjobenv@main @@ -288,7 +327,6 @@ jobs: - name: Test for secrets access id: check_secrets - shell: bash run: | unset HAS_AWS_SECRET unset HAS_GLUE_SECRET @@ -302,11 +340,6 @@ jobs: AWS_SECRET: "${{ secrets.CHIA_AWS_ACCOUNT_ID }}" GLUE_API_URL: "${{ secrets.GLUE_API_URL }}" - - name: Create Checksums - run: | - ls - sha256sum ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg > ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 - - name: Configure AWS credentials if: steps.check_secrets.outputs.HAS_AWS_SECRET uses: aws-actions/configure-aws-credentials@v4 @@ -314,36 +347,38 @@ jobs: role-to-assume: arn:aws:iam::${{ secrets.CHIA_AWS_ACCOUNT_ID }}:role/installer-upload aws-region: us-west-2 + - name: Create Checksums + run: | + ls artifacts/ + sha256sum "artifacts/${FILE}" > "artifacts/${FILE}.sha256" + - name: Upload to s3 if: steps.check_secrets.outputs.HAS_AWS_SECRET run: | - GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8) - CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH - echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >> "$GITHUB_ENV" - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/dev/Chia-${CHIA_DEV_BUILD}${{ matrix.os.file-suffix }}.dmg - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/latest-dev/Chia-${CHIA_DEV_BUILD}${{ matrix.os.file-suffix }}.dmg.sha256 + ls artifacts/ + aws s3 cp "artifacts/${FILE}" "${DEV_S3_URL}/${DEV_FILE}" + aws s3 cp "artifacts/${FILE}.sha256" "${LATEST_DEV_S3_URL}/${DEV_FILE}.sha256" - name: Create torrent + if: env.FULL_RELEASE == 'true' && matrix.mode.matrix == 'gui' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: env.FULL_RELEASE == 'true' run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg -o ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent --webseed https://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg - ls ${{ github.workspace }}/build_scripts/final_installer/ - gh release upload --repo ${{ github.repository }} $RELEASE_TAG ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ 
env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent + py3createtorrent -f -t ${TRACKER_URL} artifacts/${FILE} -o artifacts/${FILE}.torrent --webseed https://download.chia.net/install/${FILE} + gh release upload --repo ${{ github.repository }} $RELEASE_TAG artifacts/${FILE}.torrent - name: Upload Dev Installer if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main' run: | - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/latest-dev/Chia${{ matrix.os.file-suffix }}_latest_dev.dmg - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/latest-dev/Chia${{ matrix.os.file-suffix }}_latest_dev.dmg.sha256 + aws s3 cp artifacts/${FILE} ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE} + aws s3 cp artifacts/${FILE}.sha256 ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE}.sha256 - name: Upload Release Files if: steps.check_secrets.outputs.HAS_AWS_SECRET && env.FULL_RELEASE == 'true' run: | - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 s3://download.chia.net/install/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.sha256 - aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent s3://download.chia.net/torrents/Chia-${{ env.CHIA_INSTALLER_VERSION }}${{ matrix.os.file-suffix }}.dmg.torrent + aws s3 cp artifacts/${FILE} ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.sha256 ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.torrent ${TORRENT_S3_URL} - name: Upload release artifacts if: env.RELEASE == 'true' @@ -353,25 +388,18 @@ jobs: gh release upload \ --repo ${{ github.repository }} \ $RELEASE_TAG \ - build_scripts/final_installer/*.dmg + artifacts/${FILE} - - name: Mark pre-release installer complete + - name: Mark installer complete uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - with: - json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' - glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" - glue_path: "success/${{ matrix.os.glue-name }}" - - - name: Mark release installer complete - uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' + if: steps.check_secrets.outputs.HAS_GLUE_SECRET && (env.PRE_RELEASE == 'true' || env.FULL_RELEASE == 'true') + env: + REPO_SUFFIX: ${{ env.PRE_RELEASE == 'true' && '-prerelease' || '' }} with: json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" - glue_path: "success/${{ matrix.os.glue-name }}" + glue_project: "${{ env.RFC_REPO }}${{ env.REPO_SUFFIX }}/${{ env.RELEASE_TAG }}" + glue_path: "success/build-${{ matrix.os.glue-name }}-${{ matrix.arch.glue-name }}-${{ matrix.mode.glue-name }}" test: name: Test ${{ matrix.os.name }} ${{ matrix.arch.name }} @@ -398,7 +426,7 @@ jobs: arch: - name: 
ARM64 matrix: arm - artifact-name: m1 + artifact-name: arm - name: Intel matrix: intel artifact-name: intel @@ -428,10 +456,10 @@ jobs: - name: Mount .dmg env: - PACKAGE_PATH: ${{ github.workspace }}/build_scripts/final_installer/ + PACKAGE_PATH: artifacts/ run: | ls -l "${{ steps.download.outputs.download-path }}" - hdiutil attach "${{ steps.download.outputs.download-path }}"/chia-*.dmg + hdiutil attach "${{ steps.download.outputs.download-path }}"/Chia-*.dmg - name: List .dmg contents run: | diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 3d0c9235b749..c6b63678a64b 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -232,7 +232,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: chia-installers-windows-exe-intel - path: ${{ github.workspace }}\chia-blockchain-gui\release-builds\ + path: chia-blockchain-gui\release-builds\windows-installer\ - name: Remove Windows exe and installer to exclude from cache run: | @@ -242,7 +242,7 @@ jobs: Remove-Item .\chia-blockchain-gui\release-builds -Recurse -Force publish: - name: Publish EXE + name: Publish ${{ matrix.arch.name }} ${{ matrix.mode.name }} ${{ matrix.os.file-type.name }} runs-on: ubuntu-latest needs: - version @@ -252,9 +252,58 @@ jobs: fail-fast: false matrix: python-version: ["3.10"] + os: + - matrix: windows + file-type: + name: EXE + extension: exe + glue-name: windows + artifact-platform-name: windows + file-arch-name: + intel: intel + file-suffix: + arm: "" + intel: "" + names: + gui: + file: ChiaSetup-{0}.exe + dev-file: ChiaSetup-{1}.exe + latest-dev-file: ChiaSetup-latest-dev.exe + mode: + - name: GUI + matrix: gui + glue-name: gui + - name: CLI + matrix: cli + glue-name: cli + arch: + - name: ARM64 + matrix: arm + artifact-name: arm + glue-name: arm + - name: Intel + matrix: intel + artifact-name: intel + glue-name: intel + exclude: + - os: + matrix: windows + arch: + matrix: arm + - os: + matrix: windows + mode: + matrix: cli env: - CHIA_INSTALLER_VERSION: ${{ needs.version.outputs.chia-installer-version }} + FILE: ${{ format(matrix.os.names[matrix.mode.matrix].file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + LATEST_DEV_FILE: ${{ format(matrix.os.names[matrix.mode.matrix].latest-dev-file, needs.version.outputs.chia-installer-version, needs.version.outputs.chia-dev-version, matrix.os.file-arch-name[matrix.arch.matrix], matrix.os.file-suffix[matrix.arch.matrix]) }} + INSTALL_S3_URL: s3://download.chia.net/install/ + DEV_S3_URL: s3://download.chia.net/dev/ + LATEST_DEV_S3_URL: s3://download.chia.net/latest-dev/ + TORRENT_S3_URL: s3://download.chia.net/torrents/ + TRACKER_URL: udp://tracker.opentrackr.org:1337/announce steps: - uses: Chia-Network/actions/clean-workspace@main @@ -273,7 +322,7 @@ jobs: - name: Download constraints file uses: actions/download-artifact@v4 with: - name: constraints-file-intel + name: constraints-file-${{ matrix.arch.artifact-name }} path: venv - name: Install utilities @@ -283,8 +332,8 @@ jobs: - name: Download packages uses: actions/download-artifact@v4 with: - name: 
chia-installers-windows-exe-intel - path: chia-blockchain-gui/release-builds/ + name: chia-installers-${{ matrix.os.artifact-platform-name }}-${{ matrix.os.file-type.extension }}-${{ matrix.arch.artifact-name }} + path: artifacts/ - name: Set Env uses: Chia-Network/actions/setjobenv@main @@ -303,7 +352,6 @@ jobs: if [ -n "$GLUE_API_URL" ]; then HAS_GLUE_SECRET='true' ; fi echo HAS_GLUE_SECRET=${HAS_GLUE_SECRET} >> "$GITHUB_OUTPUT" env: - SIGNING_SECRET: "${{ secrets.SM_CLIENT_CERT_FILE_B64 }}" AWS_SECRET: "${{ secrets.CHIA_AWS_ACCOUNT_ID }}" GLUE_API_URL: "${{ secrets.GLUE_API_URL }}" @@ -314,68 +362,59 @@ jobs: role-to-assume: arn:aws:iam::${{ secrets.CHIA_AWS_ACCOUNT_ID }}:role/installer-upload aws-region: us-west-2 - - name: Upload to s3 - if: steps.check_secrets.outputs.HAS_AWS_SECRET + - name: Create Checksums run: | - GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8) - CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH - echo CHIA_DEV_BUILD=${CHIA_DEV_BUILD} >> "$GITHUB_OUTPUT" - echo ${CHIA_DEV_BUILD} - pwd - aws s3 cp chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${CHIA_INSTALLER_VERSION}.exe s3://download.chia.net/dev/ChiaSetup-${CHIA_DEV_BUILD}.exe + ls artifacts/ + sha256sum "artifacts/${FILE}" > "artifacts/${FILE}.sha256" - - name: Create Checksums + - name: Upload to s3 + if: steps.check_secrets.outputs.HAS_AWS_SECRET run: | - ls "$GITHUB_WORKSPACE"/chia-blockchain-gui/release-builds/windows-installer/ - sha256sum "$GITHUB_WORKSPACE"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe > "$GITHUB_WORKSPACE"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.sha256 - ls "$GITHUB_WORKSPACE"/chia-blockchain-gui/release-builds/windows-installer/ + ls artifacts/ + aws s3 cp "artifacts/${FILE}" "${DEV_S3_URL}/${DEV_FILE}" + aws s3 cp "artifacts/${FILE}.sha256" "${LATEST_DEV_S3_URL}/${DEV_FILE}.sha256" - name: Create torrent - if: env.FULL_RELEASE == 'true' + if: env.FULL_RELEASE == 'true' && matrix.mode.matrix == 'gui' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe -o "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.torrent --webseed https://download.chia.net/install/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe - ls - gh release upload --repo ${{ github.repository }} $RELEASE_TAG "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.torrent + py3createtorrent -f -t ${TRACKER_URL} artifacts/${FILE} -o artifacts/${FILE}.torrent --webseed https://download.chia.net/install/${FILE} + gh release upload --repo ${{ github.repository }} $RELEASE_TAG artifacts/${FILE}.torrent - name: Upload Dev Installer if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main' run: | - aws s3 cp "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe s3://download.chia.net/latest-dev/ChiaSetup-latest-dev.exe - aws s3 cp "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download.chia.net/latest-dev/ChiaSetup-latest-dev.exe.sha256 + aws s3 cp artifacts/${FILE} 
${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE} + aws s3 cp artifacts/${FILE}.sha256 ${LATEST_DEV_S3_URL}/${LATEST_DEV_FILE}.sha256 - name: Upload Release Files if: steps.check_secrets.outputs.HAS_AWS_SECRET && env.FULL_RELEASE == 'true' run: | - aws s3 cp "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe s3://download.chia.net/install/ - aws s3 cp "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download.chia.net/install/ - aws s3 cp "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe.torrent s3://download.chia.net/torrents/ + aws s3 cp artifacts/${FILE} ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.sha256 ${INSTALL_S3_URL} + aws s3 cp artifacts/${FILE}.torrent ${TORRENT_S3_URL} - name: Upload release artifacts if: env.RELEASE == 'true' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - gh release upload --repo ${{ github.repository }} $RELEASE_TAG "${GITHUB_WORKSPACE}"/chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${{ env.CHIA_INSTALLER_VERSION }}.exe + gh release upload \ + --repo ${{ github.repository }} \ + $RELEASE_TAG \ + artifacts/${FILE} - - name: Mark pre-release installer complete + - name: Mark installer complete uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.PRE_RELEASE == 'true' - with: - json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' - glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}-prerelease/${{ env.RELEASE_TAG }}" - glue_path: "success/build-windows" - - - name: Mark release installer complete - uses: Chia-Network/actions/github/glue@main - if: steps.check_secrets.outputs.HAS_GLUE_SECRET && env.FULL_RELEASE == 'true' + if: steps.check_secrets.outputs.HAS_GLUE_SECRET && (env.PRE_RELEASE == 'true' || env.FULL_RELEASE == 'true') + env: + REPO_SUFFIX: ${{ env.PRE_RELEASE == 'true' && '-prerelease' || '' }} with: json_data: '{"chia_ref": "${{ env.RELEASE_TAG }}"}' glue_url: "${{ secrets.GLUE_API_URL }}" - glue_project: "${{ env.RFC_REPO }}/${{ env.RELEASE_TAG }}" - glue_path: "success/build-windows" + glue_project: "${{ env.RFC_REPO }}${{ env.REPO_SUFFIX }}/${{ env.RELEASE_TAG }}" + glue_path: "success/build-${{ matrix.os.glue-name }}-${{ matrix.arch.glue-name }}-${{ matrix.mode.glue-name }}" test: name: Test ${{ matrix.os.name }} @@ -399,6 +438,9 @@ jobs: - name: Intel matrix: intel + env: + INSTALL_PATH: installed/ + steps: - uses: Chia-Network/actions/clean-workspace@main @@ -409,19 +451,26 @@ jobs: path: packages - name: Install package - env: - INSTALL_PATH: ${{ github.workspace }}\installed run: | dir ./packages/ - $env:INSTALLER_PATH = (Get-ChildItem packages/windows-installer/ChiaSetup-*.exe) - Start-Process -Wait -FilePath $env:INSTALLER_PATH -ArgumentList "/S", ("/D=" + $env:INSTALL_PATH) + $env:INSTALLER_PATH = (Get-ChildItem packages/ChiaSetup-*.exe) + # note that the installer requires the target path use backslashes + $env:RESOLVED_INSTALL_PATH = $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($env:INSTALL_PATH) + Start-Process -Wait -FilePath $env:INSTALLER_PATH -ArgumentList "/S", ("/D=" + $env:RESOLVED_INSTALL_PATH) + echo ====================================== + dir ./ + echo ====================================== + dir ./installed/ - name: List installed files run: | Get-ChildItem -Recurse $env:INSTALL_PATH | 
Select FullName + - name: List all files + if: + run: | + Get-ChildItem -Recurse $env:INSTALL_PATH | Select FullName + - name: Run chia dev installers test - env: - INSTALL_PATH: ${{ github.workspace }}\installed run: | - & ($env:INSTALL_PATH + "\resources\app.asar.unpacked\daemon\chia.exe") dev installers test --expected-chia-version "${{ needs.version.outputs.chia-installer-version }}" + & ($env:INSTALL_PATH + "/resources/app.asar.unpacked/daemon/chia.exe") dev installers test --expected-chia-version "${{ needs.version.outputs.chia-installer-version }}" diff --git a/.github/workflows/reflow-version.yml b/.github/workflows/reflow-version.yml index 6339fc28eba8..695e77a27ec4 100644 --- a/.github/workflows/reflow-version.yml +++ b/.github/workflows/reflow-version.yml @@ -14,6 +14,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 outputs: + chia-dev-version: ${{ steps.version-number.outputs.chia-dev-version }} chia-installer-version: ${{ steps.version-number.outputs.chia-installer-version }} tag-type: ${{ steps.tag-type.outputs.tag-type }} @@ -46,5 +47,10 @@ jobs: . ../venv/bin/activate python -m pip install --upgrade pip pip install poetry "poetry-dynamic-versioning[plugin]" - echo "chia-installer-version=$(poetry version -s)" >> "$GITHUB_OUTPUT" + + VERSION=$(poetry version -s) + echo "chia-installer-version=${VERSION}" >> "$GITHUB_OUTPUT" + GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8) + echo "chia-dev-version=${VERSION}-${GIT_SHORT_HASH}" >> "$GITHUB_OUTPUT" + deactivate diff --git a/build_scripts/build_macos-2-installer.sh b/build_scripts/build_macos-2-installer.sh index fc7a526f5f4a..ed85041bb319 100644 --- a/build_scripts/build_macos-2-installer.sh +++ b/build_scripts/build_macos-2-installer.sh @@ -90,12 +90,14 @@ mv dist/* ../../../build_scripts/dist/ cd ../../../build_scripts || exit 1 mkdir final_installer -DMG_NAME="chia-${CHIA_INSTALLER_VERSION}.dmg" +ORIGINAL_DMG_NAME="chia-${CHIA_INSTALLER_VERSION}.dmg" if [ "$(arch)" = "arm64" ]; then - mv dist/"${DMG_NAME}" dist/chia-"${CHIA_INSTALLER_VERSION}"-arm64.dmg - DMG_NAME=chia-${CHIA_INSTALLER_VERSION}-arm64.dmg + DMG_NAME=Chia-${CHIA_INSTALLER_VERSION}-arm64.dmg +else + # NOTE: when coded, this changes the case to Chia + DMG_NAME=Chia-${CHIA_INSTALLER_VERSION}.dmg fi -mv dist/"$DMG_NAME" final_installer/ +mv dist/"$ORIGINAL_DMG_NAME" final_installer/"$DMG_NAME" ls -lh final_installer From 7de41fc9b4dc68b9194eb99dabed15b44159895b Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 15 Jul 2024 16:36:01 -0400 Subject: [PATCH 74/77] tidy `import aiosqlite as aiosqlite` (#18278) --- chia/data_layer/data_layer_util.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/chia/data_layer/data_layer_util.py b/chia/data_layer/data_layer_util.py index ba1bd8e19c1c..28897a6d5d50 100644 --- a/chia/data_layer/data_layer_util.py +++ b/chia/data_layer/data_layer_util.py @@ -5,8 +5,7 @@ from enum import Enum, IntEnum from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union -# TODO: remove or formalize this -import aiosqlite as aiosqlite +import aiosqlite from typing_extensions import final from chia.data_layer.data_layer_errors import ProofIntegrityError From 3cc89daf590d7541db306f509a085ca2628531be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 13:36:26 -0700 Subject: [PATCH 75/77] build(deps): bump cryptography from 42.0.5 to 42.0.8 (#18204) Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.5 to 
42.0.8. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.5...42.0.8) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 68 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2d7b9ebaf413..ce221b865670 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1141,43 +1141,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "047b1073091dde271fd618d0464255a087f323c6d4593d4c2d5d87149949eac6" +content-hash = "0ec94db1f62434dc25644dcd682b74ee88fd43f912e9db2076b308ca8c7db775" diff --git a/pyproject.toml b/pyproject.toml index fd73373709c7..27f0601e3355 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,7 @@ clvm_tools_rs = "0.1.40" # Rust implementation of clvm_tools' compiler colorama = "0.4.6" # Colorizes terminal output colorlog = "6.8.2" # Adds color to logs concurrent_log_handler = "0.9.25" # Concurrently log and rotate logs -cryptography = "42.0.5" # Python cryptography library for TLS - keyring conflict +cryptography = "42.0.8" # Python cryptography library for TLS - keyring conflict dnslib = "0.9.24" # dns lib dnspython = "2.6.1" # Query DNS seeds filelock = "3.15.4" # For reading and writing config multiprocess and multithread safely (non-reentrant locks) From 29944cee77c8c6aef641856e0eefc877bd1750ed Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 15 Jul 2024 21:47:50 -0400 Subject: [PATCH 76/77] set pip rebase strategy to disabled (#18306) --- .repo-content-updater.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.repo-content-updater.yml b/.repo-content-updater.yml index dbb0e6b105e5..b6b565bc5b60 100644 --- a/.repo-content-updater.yml +++ b/.repo-content-updater.yml @@ -1,2 +1,3 @@ var_overrides: DEPENDABOT_ACTIONS_REVIEWERS: '["cmmarslender", "altendky"]' + DEPENDABOT_PIP_REBASE_STRATEGY: disabled From 1c6ca1df4483c45f0e0731c859d667b0a7b52037 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 18:48:26 -0700 Subject: [PATCH 77/77] build(deps): bump click from 8.1.3 to 8.1.7 (#18161) Bumps [click](https://github.com/pallets/click) from 8.1.3 to 8.1.7. - [Release notes](https://github.com/pallets/click/releases) - [Changelog](https://github.com/pallets/click/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/click/compare/8.1.3...8.1.7) --- updated-dependencies: - dependency-name: click dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ce221b865670..7592ee8e8796 100644 --- a/poetry.lock +++ b/poetry.lock @@ -950,13 +950,13 @@ files = [ [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -3424,4 +3424,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "0ec94db1f62434dc25644dcd682b74ee88fd43f912e9db2076b308ca8c7db775" +content-hash = "890c2c6f92869c764c4004cb1c445e5d7a504df0d97988a925cde93f47817831" diff --git a/pyproject.toml b/pyproject.toml index 27f0601e3355..0670e0047ff9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,7 @@ chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space chia_rs = "0.10.0" chiavdf = "1.1.4" # timelord and vdf verification -click = "8.1.3" # For the CLI +click = "8.1.7" # For the CLI clvm = "0.9.10" clvm_tools = "0.4.9" # Currying Program.to other conveniences clvm_tools_rs = "0.1.40" # Rust implementation of clvm_tools' compiler