Skip to content

Commit

Permalink
Merge branch 'main' into aiohttp_3_9_2
Browse files Browse the repository at this point in the history
  • Loading branch information
altendky committed Mar 25, 2024
2 parents 37e2755 + 5350428 commit 2d40fa6
Show file tree
Hide file tree
Showing 13 changed files with 241 additions and 222 deletions.
14 changes: 1 addition & 13 deletions .github/workflows/benchmarks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,19 +74,7 @@ jobs:
with:
fetch-depth: 0

- name: Get pip cache dir
id: pip-cache
shell: bash
run: |
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
- name: Cache pip
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: chia-network/actions/cache-pip@main

- name: Checkout test blocks and plots
uses: actions/checkout@v4
Expand Down
14 changes: 1 addition & 13 deletions .github/workflows/build-windows-installer.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,19 +92,7 @@ jobs:
restore-keys: |
${{ runner.os }}-node-
- name: Get pip cache dir
id: pip-cache
shell: bash
run: |
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
- name: Cache pip
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: chia-network/actions/cache-pip@main

- uses: Chia-Network/actions/setup-python@main
name: Install Python ${{ matrix.python-version }}
Expand Down
17 changes: 1 addition & 16 deletions .github/workflows/test-single.yml
Original file line number Diff line number Diff line change
Expand Up @@ -162,22 +162,7 @@ jobs:
restore-keys: |
${{ runner.os }}-node-
- name: Get pip cache dir
id: pip-cache
shell: bash
run: |
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
- name: Cache pip
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MIN: 1
with:
# Note that new runners may break this https://github.com/actions/cache/issues/292
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: chia-network/actions/cache-pip@main

- name: Cache test blocks and plots
if: matrix.configuration.checkout_blocks_and_plots
Expand Down
2 changes: 1 addition & 1 deletion chia-blockchain-gui
8 changes: 8 additions & 0 deletions chia/_tests/core/data_layer/test_data_rpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -2582,6 +2582,14 @@ async def test_dl_proof_errors(
with pytest.raises(ValueError, match="no root"):
await data_rpc_api.get_proof(request={"store_id": fakeroot.hex(), "keys": []})

with pytest.raises(Exception, match="No generations found"):
await data_rpc_api.get_proof(request={"store_id": store_id.hex(), "keys": [b"4".hex()]})

changelist: List[Dict[str, str]] = [{"action": "insert", "key": b"a".hex(), "value": b"\x00\x01".hex()}]
res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist})
update_tx_rec0 = res["tx_id"]
await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api)

with pytest.raises(KeyNotFoundError, match="Key not found"):
await data_rpc_api.get_proof(request={"store_id": store_id.hex(), "keys": [b"4".hex()]})

Expand Down
100 changes: 28 additions & 72 deletions chia/_tests/core/data_layer/test_data_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
from dataclasses import dataclass
from pathlib import Path
from random import Random
from typing import Any, Awaitable, Callable, Dict, List, Optional, Set, Tuple, cast
from typing import Any, Awaitable, Callable, Dict, List, Set, Tuple, cast

import aiohttp
import aiosqlite
import pytest

Expand Down Expand Up @@ -310,8 +311,6 @@ async def test_get_ancestors_optimized(data_store: DataStore, tree_id: bytes32)
if i > 25 and i <= 200 and random.randint(0, 4):
is_insert = True
if i > 200:
kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed
if not deleted_all:
while node_count > 0:
node_count -= 1
Expand All @@ -320,9 +319,7 @@ async def test_get_ancestors_optimized(data_store: DataStore, tree_id: bytes32)
assert node_hash is not None
node = await data_store.get_node(node_hash)
assert isinstance(node, TerminalNode)
await data_store.delete(
key=node.key, tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED
)
await data_store.delete(key=node.key, tree_id=tree_id, status=Status.COMMITTED)
deleted_all = True
is_insert = True
else:
Expand Down Expand Up @@ -386,7 +383,6 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz

batch: List[Dict[str, Any]] = []
keys_values: Dict[bytes, bytes] = {}
hint_keys_values: Optional[Dict[bytes32, bytes32]] = {} if use_optimized else None
for operation in range(num_batches * num_ops_per_batch):
[op_type] = random.choices(
["insert", "upsert-insert", "upsert-update", "delete"],
Expand All @@ -403,7 +399,6 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz
key=key,
value=value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -412,7 +407,6 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz
key=key,
new_value=value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -425,7 +419,6 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz
await single_op_data_store.delete(
key=key,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -440,7 +433,6 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz
key=key,
new_value=new_value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand Down Expand Up @@ -493,13 +485,11 @@ async def test_upsert_ignores_existing_arguments(
) -> None:
key = b"key"
value = b"value1"
hint_keys_values: Optional[Dict[bytes32, bytes32]] = {} if use_optimized else None

await data_store.autoinsert(
key=key,
value=value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -511,7 +501,6 @@ async def test_upsert_ignores_existing_arguments(
key=key,
new_value=new_value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -522,7 +511,6 @@ async def test_upsert_ignores_existing_arguments(
key=key,
new_value=new_value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand All @@ -534,7 +522,6 @@ async def test_upsert_ignores_existing_arguments(
key=key2,
new_value=value,
tree_id=tree_id,
hint_keys_values=hint_keys_values,
use_optimized=use_optimized,
status=Status.COMMITTED,
)
Expand Down Expand Up @@ -646,8 +633,6 @@ async def test_inserting_duplicate_key_fails(
side=Side.RIGHT,
)

kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed
# TODO: more specific exception
with pytest.raises(Exception):
await data_store.insert(
Expand All @@ -656,7 +641,6 @@ async def test_inserting_duplicate_key_fails(
tree_id=tree_id,
reference_node_hash=insert_result.node_hash,
side=Side.RIGHT,
hint_keys_values=hint_keys_values,
)


Expand Down Expand Up @@ -695,13 +679,12 @@ async def test_inserting_invalid_length_ancestor_hash_raises_original_exception(
async def test_autoinsert_balances_from_scratch(data_store: DataStore, tree_id: bytes32) -> None:
random = Random()
random.seed(100, version=2)
hint_keys_values: Dict[bytes32, bytes32] = {}
hashes = []

for i in range(2000):
key = (i + 100).to_bytes(4, byteorder="big")
value = (i + 200).to_bytes(4, byteorder="big")
insert_result = await data_store.autoinsert(key, value, tree_id, hint_keys_values, status=Status.COMMITTED)
insert_result = await data_store.autoinsert(key, value, tree_id, status=Status.COMMITTED)
hashes.append(insert_result.node_hash)

heights = {node_hash: len(await data_store.get_ancestors_optimized(node_hash, tree_id)) for node_hash in hashes}
Expand All @@ -714,14 +697,13 @@ async def test_autoinsert_balances_from_scratch(data_store: DataStore, tree_id:
async def test_autoinsert_balances_gaps(data_store: DataStore, tree_id: bytes32) -> None:
random = Random()
random.seed(101, version=2)
hint_keys_values: Dict[bytes32, bytes32] = {}
hashes = []

for i in range(2000):
key = (i + 100).to_bytes(4, byteorder="big")
value = (i + 200).to_bytes(4, byteorder="big")
if i == 0 or i > 10:
insert_result = await data_store.autoinsert(key, value, tree_id, hint_keys_values, status=Status.COMMITTED)
insert_result = await data_store.autoinsert(key, value, tree_id, status=Status.COMMITTED)
else:
reference_node_hash = await data_store.get_terminal_node_for_seed(tree_id, bytes32([0] * 32))
insert_result = await data_store.insert(
Expand All @@ -730,7 +712,6 @@ async def test_autoinsert_balances_gaps(data_store: DataStore, tree_id: bytes32)
tree_id=tree_id,
reference_node_hash=reference_node_hash,
side=Side.LEFT,
hint_keys_values=hint_keys_values,
status=Status.COMMITTED,
)
ancestors = await data_store.get_ancestors_optimized(insert_result.node_hash, tree_id)
Expand All @@ -743,19 +724,10 @@ async def test_autoinsert_balances_gaps(data_store: DataStore, tree_id: bytes32)
assert 11 <= statistics.mean(heights.values()) <= 12


@pytest.mark.parametrize(
"use_hint",
[True, False],
)
@pytest.mark.anyio()
async def test_delete_from_left_both_terminal(data_store: DataStore, tree_id: bytes32, use_hint: bool) -> None:
async def test_delete_from_left_both_terminal(data_store: DataStore, tree_id: bytes32) -> None:
await add_01234567_example(data_store=data_store, tree_id=tree_id)

hint_keys_values = None
if use_hint:
kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed

expected = Program.to(
(
(
Expand All @@ -778,25 +750,16 @@ async def test_delete_from_left_both_terminal(data_store: DataStore, tree_id: by
),
)

await data_store.delete(key=b"\x04", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x04", tree_id=tree_id, status=Status.COMMITTED)
result = await data_store.get_tree_as_program(tree_id=tree_id)

assert result == expected


@pytest.mark.parametrize(
"use_hint",
[True, False],
)
@pytest.mark.anyio()
async def test_delete_from_left_other_not_terminal(data_store: DataStore, tree_id: bytes32, use_hint: bool) -> None:
async def test_delete_from_left_other_not_terminal(data_store: DataStore, tree_id: bytes32) -> None:
await add_01234567_example(data_store=data_store, tree_id=tree_id)

hint_keys_values = None
if use_hint:
kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed

expected = Program.to(
(
(
Expand All @@ -816,26 +779,17 @@ async def test_delete_from_left_other_not_terminal(data_store: DataStore, tree_i
),
)

await data_store.delete(key=b"\x04", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x05", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x04", tree_id=tree_id, status=Status.COMMITTED)
await data_store.delete(key=b"\x05", tree_id=tree_id, status=Status.COMMITTED)
result = await data_store.get_tree_as_program(tree_id=tree_id)

assert result == expected


@pytest.mark.parametrize(
"use_hint",
[True, False],
)
@pytest.mark.anyio()
async def test_delete_from_right_both_terminal(data_store: DataStore, tree_id: bytes32, use_hint: bool) -> None:
async def test_delete_from_right_both_terminal(data_store: DataStore, tree_id: bytes32) -> None:
await add_01234567_example(data_store=data_store, tree_id=tree_id)

hint_keys_values = None
if use_hint:
kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed

expected = Program.to(
(
(
Expand All @@ -858,25 +812,16 @@ async def test_delete_from_right_both_terminal(data_store: DataStore, tree_id: b
),
)

await data_store.delete(key=b"\x03", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x03", tree_id=tree_id, status=Status.COMMITTED)
result = await data_store.get_tree_as_program(tree_id=tree_id)

assert result == expected


@pytest.mark.parametrize(
"use_hint",
[True, False],
)
@pytest.mark.anyio()
async def test_delete_from_right_other_not_terminal(data_store: DataStore, tree_id: bytes32, use_hint: bool) -> None:
async def test_delete_from_right_other_not_terminal(data_store: DataStore, tree_id: bytes32) -> None:
await add_01234567_example(data_store=data_store, tree_id=tree_id)

hint_keys_values = None
if use_hint:
kv_compressed = await data_store.get_keys_values_compressed(tree_id=tree_id)
hint_keys_values = kv_compressed.keys_values_hashed

expected = Program.to(
(
(
Expand All @@ -896,8 +841,8 @@ async def test_delete_from_right_other_not_terminal(data_store: DataStore, tree_
),
)

await data_store.delete(key=b"\x03", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x02", tree_id=tree_id, hint_keys_values=hint_keys_values, status=Status.COMMITTED)
await data_store.delete(key=b"\x03", tree_id=tree_id, status=Status.COMMITTED)
await data_store.delete(key=b"\x02", tree_id=tree_id, status=Status.COMMITTED)
result = await data_store.get_tree_as_program(tree_id=tree_id)

assert result == expected
Expand Down Expand Up @@ -1391,8 +1336,8 @@ async def mock_http_download(
server_info: ServerInfo,
timeout: int,
log: logging.Logger,
) -> bool:
return False
) -> None:
raise aiohttp.ClientConnectionError()

start_timestamp = int(time.time())
with monkeypatch.context() as m:
Expand Down Expand Up @@ -1836,3 +1781,14 @@ async def test_get_node_by_key_with_overlapping_keys(raw_data_store: DataStore)
await raw_data_store.insert_batch(tree_id, batch, status=Status.COMMITTED)
with pytest.raises(KeyNotFoundError, match=f"Key not found: {key.hex()}"):
await raw_data_store.get_node_by_key(tree_id=tree_id, key=key)


@pytest.mark.anyio
async def test_insert_key_already_present(data_store: DataStore, tree_id: bytes32) -> None:
    """Verify that inserting a key that already exists in the store raises.

    First inserts ``key`` with ``status=Status.COMMITTED`` so it is persisted,
    then attempts a second insert of the same key and expects an exception
    whose message includes the hex of the duplicate key.
    """
    key = b"foo"
    value = b"bar"
    # reference_node_hash=None / side=None lets the store pick the insertion
    # point itself (presumably the autoinsert path — confirm against DataStore.insert).
    await data_store.insert(
        key=key, value=value, tree_id=tree_id, reference_node_hash=None, side=None, status=Status.COMMITTED
    )
    # The duplicate insert must fail; the error message embeds key.hex().
    # TODO(review): a more specific exception type than Exception would make
    # this assertion stricter, if DataStore.insert raises one.
    with pytest.raises(Exception, match=f"Key already present: {key.hex()}"):
        await data_store.insert(key=key, value=value, tree_id=tree_id, reference_node_hash=None, side=None)
Loading

0 comments on commit 2d40fa6

Please sign in to comment.