Skip to content

Commit 747dc32

Browse files
committed
Merge branch 'feat/thewhaleking/python-ss58-conversion' into feat/thewhaleking/fully-exhaust-query-map
2 parents 1c9da05 + 9b6cd53 commit 747dc32

File tree

7 files changed

+157
-193
lines changed

7 files changed

+157
-193
lines changed

async_substrate_interface/async_substrate.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1378,9 +1378,9 @@ async def get_metadata_errors(
13781378

13791379
async def get_metadata_error(
13801380
self,
1381-
module_name,
1382-
error_name,
1383-
block_hash=None,
1381+
module_name: str,
1382+
error_name: str,
1383+
block_hash: Optional[str] = None,
13841384
runtime: Optional[Runtime] = None,
13851385
):
13861386
"""
@@ -3113,9 +3113,9 @@ async def get_metadata_constants(self, block_hash=None) -> list[dict]:
31133113

31143114
async def get_metadata_constant(
31153115
self,
3116-
module_name,
3117-
constant_name,
3118-
block_hash=None,
3116+
module_name: str,
3117+
constant_name: str,
3118+
block_hash: Optional[str] = None,
31193119
runtime: Optional[Runtime] = None,
31203120
):
31213121
"""

async_substrate_interface/types.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -649,8 +649,8 @@ def is_valid_ss58_address(self, value: str) -> bool:
649649
def serialize_storage_item(
650650
self,
651651
storage_item: ScaleType,
652-
module,
653-
spec_version_id,
652+
module: str,
653+
spec_version_id: int,
654654
runtime: Optional[Runtime] = None,
655655
) -> dict:
656656
"""

async_substrate_interface/utils/decoding.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -122,9 +122,11 @@ def concat_hash_len(key_hasher: str) -> int:
122122
middl_index = len(all_decoded) // 2
123123
decoded_keys = all_decoded[:middl_index]
124124
decoded_values = all_decoded[middl_index:]
125-
for (kts, vts), (dk, dv) in zip(
126-
zip(pre_decoded_key_types, pre_decoded_value_types),
127-
zip(decoded_keys, decoded_values),
125+
for kts, vts, dk, dv in zip(
126+
pre_decoded_key_types,
127+
pre_decoded_value_types,
128+
decoded_keys,
129+
decoded_values,
128130
):
129131
try:
130132
# strip key_hashers to use as item key
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
import pytest
2+
from scalecodec import ss58_encode
3+
4+
from async_substrate_interface.async_substrate import AsyncSubstrateInterface
5+
from async_substrate_interface.types import ScaleObj
6+
from tests.helpers.settings import ARCHIVE_ENTRYPOINT, LATENT_LITE_ENTRYPOINT
7+
8+
9+
@pytest.mark.asyncio
async def test_legacy_decoding():
    """Exercise decoding against a block minted before metadata v15 existed.

    Ensures the legacy (pre-v15) metadata path still decodes events,
    query_map results, and plain storage queries correctly.
    """
    # roughly 4000 blocks before metadata v15 was added
    legacy_block_number = 3_010_611

    async with AsyncSubstrateInterface(ARCHIVE_ENTRYPOINT) as substrate:
        legacy_hash = await substrate.get_block_hash(legacy_block_number)

        # Events must decode into a plain list on the legacy path.
        events = await substrate.get_events(legacy_hash)
        assert isinstance(events, list)

        # query_map keys/values must come back as int / ScaleObj pairs.
        networks_qm = await substrate.query_map(
            module="SubtensorModule",
            storage_function="NetworksAdded",
            block_hash=legacy_hash,
        )
        async for map_key, map_value in networks_qm:
            assert isinstance(map_key, int)
            assert isinstance(map_value, ScaleObj)

        # A plain storage query must yield the known timestamp of this block.
        timestamp = await substrate.query(
            "Timestamp",
            "Now",
            block_hash=legacy_hash,
        )
        assert timestamp.value == 1716358476004
34+
35+
36+
@pytest.mark.asyncio
async def test_ss58_conversion():
    """Verify account-id handling with ``decode_ss58`` off and on.

    With ``decode_ss58=False``, map keys are raw tuples and values hold
    32-byte account ids; with ``decode_ss58=True``, both keys and values
    come back as ss58-encoded strings.
    """
    async with AsyncSubstrateInterface(
        LATENT_LITE_ENTRYPOINT, ss58_format=42, decode_ss58=False
    ) as substrate:
        block_hash = await substrate.get_chain_finalised_head()
        qm = await substrate.query_map(
            "SubtensorModule",
            "OwnedHotkeys",
            block_hash=block_hash,
        )
        # only do the first page, bc otherwise this will be massive
        random_key = None  # last raw account id seen; sentinel guards empty maps
        for key, value in qm.records:
            assert isinstance(key, tuple)
            assert isinstance(value, ScaleObj)
            assert isinstance(value.value, list)
            assert len(key) == 1
            for key_tuple in value.value:
                # raw account ids are exactly 32 bytes
                assert len(key_tuple[0]) == 32
                random_key = key_tuple[0]

        # Fail with a clear message instead of a NameError when the first
        # page has no hotkeys to encode.
        assert random_key is not None, "expected at least one hotkey on the first page"
        ss58_of_key = ss58_encode(bytes(random_key), substrate.ss58_format)
        assert isinstance(ss58_of_key, str)

        substrate.decode_ss58 = True  # change to decoding True

        qm = await substrate.query_map(
            "SubtensorModule",
            "OwnedHotkeys",
            block_hash=block_hash,
        )
        for key, value in qm.records:
            assert isinstance(key, str)
            assert isinstance(value, ScaleObj)
            assert isinstance(value.value, list)
            if len(value.value) > 0:
                for decoded_key in value.value:
                    assert isinstance(decoded_key, str)
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
from scalecodec import ss58_encode
2+
3+
from async_substrate_interface.sync_substrate import SubstrateInterface
4+
from async_substrate_interface.types import ScaleObj
5+
from tests.helpers.settings import ARCHIVE_ENTRYPOINT, LATENT_LITE_ENTRYPOINT
6+
7+
8+
def test_legacy_decoding():
    """Exercise decoding against a block minted before metadata v15 existed.

    Synchronous counterpart of the async legacy-decoding test: events,
    query_map results, and plain storage queries must all decode via the
    legacy (pre-v15) metadata path.
    """
    # roughly 4000 blocks before metadata v15 was added
    legacy_block_number = 3_010_611

    with SubstrateInterface(ARCHIVE_ENTRYPOINT) as substrate:
        legacy_hash = substrate.get_block_hash(legacy_block_number)

        # Events must decode into a plain list on the legacy path.
        events = substrate.get_events(legacy_hash)
        assert isinstance(events, list)

        # query_map keys/values must come back as int / ScaleObj pairs.
        networks_qm = substrate.query_map(
            module="SubtensorModule",
            storage_function="NetworksAdded",
            block_hash=legacy_hash,
        )
        for map_key, map_value in networks_qm:
            assert isinstance(map_key, int)
            assert isinstance(map_value, ScaleObj)

        # A plain storage query must yield the known timestamp of this block.
        timestamp = substrate.query(
            "Timestamp",
            "Now",
            block_hash=legacy_hash,
        )
        assert timestamp.value == 1716358476004
32+
33+
34+
def test_ss58_conversion():
    """Verify account-id handling with ``decode_ss58`` off and on.

    With ``decode_ss58=False``, map keys are raw tuples and values hold
    32-byte account ids; with ``decode_ss58=True``, both keys and values
    come back as ss58-encoded strings.
    """
    with SubstrateInterface(
        LATENT_LITE_ENTRYPOINT, ss58_format=42, decode_ss58=False
    ) as substrate:
        block_hash = substrate.get_chain_finalised_head()
        qm = substrate.query_map(
            "SubtensorModule",
            "OwnedHotkeys",
            block_hash=block_hash,
        )
        # only do the first page, bc otherwise this will be massive
        random_key = None  # last raw account id seen; sentinel guards empty maps
        for key, value in qm.records:
            assert isinstance(key, tuple)
            assert isinstance(value, ScaleObj)
            assert isinstance(value.value, list)
            assert len(key) == 1
            for key_tuple in value.value:
                # raw account ids are exactly 32 bytes
                assert len(key_tuple[0]) == 32
                random_key = key_tuple[0]

        # Fail with a clear message instead of a NameError when the first
        # page has no hotkeys to encode.
        assert random_key is not None, "expected at least one hotkey on the first page"
        ss58_of_key = ss58_encode(bytes(random_key), substrate.ss58_format)
        assert isinstance(ss58_of_key, str)

        substrate.decode_ss58 = True  # change to decoding True

        qm = substrate.query_map(
            "SubtensorModule",
            "OwnedHotkeys",
            block_hash=block_hash,
        )
        for key, value in qm.records:
            assert isinstance(key, str)
            assert isinstance(value, ScaleObj)
            assert isinstance(value.value, list)
            if len(value.value) > 0:
                for decoded_key in value.value:
                    assert isinstance(decoded_key, str)
Lines changed: 1 addition & 113 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,11 @@
11
import asyncio
2-
import time
32
from unittest.mock import AsyncMock, MagicMock, ANY
43

54
import pytest
6-
from scalecodec import ss58_encode
75
from websockets.exceptions import InvalidURI
86

97
from async_substrate_interface.async_substrate import AsyncSubstrateInterface
108
from async_substrate_interface.types import ScaleObj
11-
from tests.helpers.settings import ARCHIVE_ENTRYPOINT, LATENT_LITE_ENTRYPOINT
129

1310

1411
@pytest.mark.asyncio
@@ -102,7 +99,7 @@ async def test_runtime_call(monkeypatch):
10299
@pytest.mark.asyncio
103100
async def test_websocket_shutdown_timer():
104101
# using default ws shutdown timer of 5.0 seconds
105-
async with AsyncSubstrateInterface(LATENT_LITE_ENTRYPOINT) as substrate:
102+
async with AsyncSubstrateInterface("wss://lite.sub.latent.to:443") as substrate:
106103
await substrate.get_chain_head()
107104
await asyncio.sleep(6)
108105
assert (
@@ -116,112 +113,3 @@ async def test_websocket_shutdown_timer():
116113
await substrate.get_chain_head()
117114
await asyncio.sleep(6) # same sleep time as before
118115
assert substrate.ws._initialized is True # connection should still be open
119-
120-
121-
@pytest.mark.asyncio
122-
async def test_legacy_decoding():
123-
# roughly 4000 blocks before metadata v15 was added
124-
pre_metadata_v15_block = 3_010_611
125-
126-
async with AsyncSubstrateInterface(ARCHIVE_ENTRYPOINT) as substrate:
127-
block_hash = await substrate.get_block_hash(pre_metadata_v15_block)
128-
events = await substrate.get_events(block_hash)
129-
assert isinstance(events, list)
130-
131-
query_map_result = await substrate.query_map(
132-
module="SubtensorModule",
133-
storage_function="NetworksAdded",
134-
block_hash=block_hash,
135-
)
136-
async for key, value in query_map_result:
137-
assert isinstance(key, int)
138-
assert isinstance(value, ScaleObj)
139-
140-
timestamp = await substrate.query(
141-
"Timestamp",
142-
"Now",
143-
block_hash=block_hash,
144-
)
145-
assert timestamp.value == 1716358476004
146-
147-
148-
@pytest.mark.asyncio
149-
async def test_ss58_conversion():
150-
async with AsyncSubstrateInterface(
151-
LATENT_LITE_ENTRYPOINT, ss58_format=42, decode_ss58=False
152-
) as substrate:
153-
block_hash = await substrate.get_chain_finalised_head()
154-
qm = await substrate.query_map(
155-
"SubtensorModule",
156-
"OwnedHotkeys",
157-
block_hash=block_hash,
158-
)
159-
# only do the first page, bc otherwise this will be massive
160-
for key, value in qm.records:
161-
assert isinstance(key, tuple)
162-
assert isinstance(value, ScaleObj)
163-
assert isinstance(value.value, list)
164-
assert len(key) == 1
165-
for key_tuple in value.value:
166-
assert len(key_tuple[0]) == 32
167-
random_key = key_tuple[0]
168-
169-
ss58_of_key = ss58_encode(bytes(random_key), substrate.ss58_format)
170-
assert isinstance(ss58_of_key, str)
171-
172-
substrate.decode_ss58 = True # change to decoding True
173-
174-
qm = await substrate.query_map(
175-
"SubtensorModule",
176-
"OwnedHotkeys",
177-
block_hash=block_hash,
178-
)
179-
for key, value in qm.records:
180-
assert isinstance(key, str)
181-
assert isinstance(value, ScaleObj)
182-
assert isinstance(value.value, list)
183-
if len(value.value) > 0:
184-
for decoded_key in value.value:
185-
assert isinstance(decoded_key, str)
186-
187-
188-
@pytest.mark.asyncio
189-
async def test_fully_exhaust_query_map():
190-
async with AsyncSubstrateInterface(LATENT_LITE_ENTRYPOINT) as substrate:
191-
block_hash = await substrate.get_chain_finalised_head()
192-
non_fully_exhauster_start = time.time()
193-
non_fully_exhausted_qm = await substrate.query_map(
194-
"SubtensorModule",
195-
"CRV3WeightCommits",
196-
block_hash=block_hash,
197-
)
198-
initial_records_count = len(non_fully_exhausted_qm.records)
199-
assert initial_records_count <= 100 # default page size
200-
exhausted_records_count = 0
201-
async for _ in non_fully_exhausted_qm:
202-
exhausted_records_count += 1
203-
non_fully_exhausted_time = time.time() - non_fully_exhauster_start
204-
205-
assert len(non_fully_exhausted_qm.records) >= initial_records_count
206-
fully_exhausted_start = time.time()
207-
fully_exhausted_qm = await substrate.query_map(
208-
"SubtensorModule",
209-
"CRV3WeightCommits",
210-
block_hash=block_hash,
211-
fully_exhaust=True,
212-
)
213-
214-
fully_exhausted_time = time.time() - fully_exhausted_start
215-
initial_records_count_fully_exhaust = len(fully_exhausted_qm.records)
216-
assert fully_exhausted_time <= non_fully_exhausted_time, (
217-
f"Fully exhausted took longer than non-fully exhausted with "
218-
f"{len(non_fully_exhausted_qm.records)} records in non-fully exhausted "
219-
f"in {non_fully_exhausted_time} seconds, and {initial_records_count_fully_exhaust} in fully exhausted"
220-
f" in {fully_exhausted_time} seconds. This could be caused by the fact that on this specific block, "
221-
f"there are fewer records than take up a single page. This difference should still be small."
222-
)
223-
fully_exhausted_records_count = 0
224-
async for _ in fully_exhausted_qm:
225-
fully_exhausted_records_count += 1
226-
assert fully_exhausted_records_count == initial_records_count_fully_exhaust
227-
assert initial_records_count_fully_exhaust == exhausted_records_count

0 commit comments

Comments
 (0)