Skip to content

Commit 2d596e7

Browse files
Merge pull request #75 from opentensor/release/1.0.6
Release/1.0.6
2 parents e10a9de + a07ca5c commit 2d596e7

File tree

5 files changed

+187
-13
lines changed

5 files changed

+187
-13
lines changed

CHANGELOG.md

+7
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,12 @@
11
# Changelog
22

3+
## 1.0.6 /2025-03-12
4+
5+
## What's Changed
6+
* On-disk cache by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/67
7+
8+
**Full Changelog**: https://github.com/opentensor/async-substrate-interface/compare/v1.0.5...v1.0.6
9+
310
## 1.0.5 /2025-03-06
411

512
## What's Changed

async_substrate_interface/async_substrate.py

+39-5
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@
4949
Preprocessed,
5050
)
5151
from async_substrate_interface.utils import hex_to_bytes, json, get_next_id
52+
from async_substrate_interface.utils.cache import async_sql_lru_cache
5253
from async_substrate_interface.utils.decoding import (
5354
_determine_if_old_runtime_call,
5455
_bt_decode_to_dict_or_list,
@@ -1659,8 +1660,11 @@ def convert_event_data(data):
16591660
events.append(convert_event_data(item))
16601661
return events
16611662

1662-
@a.lru_cache(maxsize=512) # large cache with small items
1663+
@a.lru_cache(maxsize=512)
16631664
async def get_parent_block_hash(self, block_hash):
1665+
return await self._get_parent_block_hash(block_hash)
1666+
1667+
async def _get_parent_block_hash(self, block_hash):
16641668
block_header = await self.rpc_request("chain_getHeader", [block_hash])
16651669

16661670
if block_header["result"] is None:
@@ -1672,16 +1676,22 @@ async def get_parent_block_hash(self, block_hash):
16721676
return block_hash
16731677
return parent_block_hash
16741678

1675-
@a.lru_cache(maxsize=16) # small cache with large items
1679+
@a.lru_cache(maxsize=16)
16761680
async def get_block_runtime_info(self, block_hash: str) -> dict:
1681+
return await self._get_block_runtime_info(block_hash)
1682+
1683+
async def _get_block_runtime_info(self, block_hash: str) -> dict:
16771684
"""
16781685
Retrieve the runtime info of given block_hash
16791686
"""
16801687
response = await self.rpc_request("state_getRuntimeVersion", [block_hash])
16811688
return response.get("result")
16821689

1683-
@a.lru_cache(maxsize=512) # large cache with small items
1690+
@a.lru_cache(maxsize=512)
16841691
async def get_block_runtime_version_for(self, block_hash: str):
1692+
return await self._get_block_runtime_version_for(block_hash)
1693+
1694+
async def _get_block_runtime_version_for(self, block_hash: str):
16851695
"""
16861696
Retrieve the runtime version of the parent of a given block_hash
16871697
"""
@@ -1914,7 +1924,6 @@ async def _make_rpc_request(
19141924

19151925
return request_manager.get_results()
19161926

1917-
@a.lru_cache(maxsize=512) # RPC methods are unlikely to change often
19181927
async def supports_rpc_method(self, name: str) -> bool:
19191928
"""
19201929
Check if substrate RPC supports given method
@@ -1985,8 +1994,11 @@ async def rpc_request(
19851994
else:
19861995
raise SubstrateRequestException(result[payload_id][0])
19871996

1988-
@a.lru_cache(maxsize=512) # block_id->block_hash does not change
1997+
@a.lru_cache(maxsize=512)
19891998
async def get_block_hash(self, block_id: int) -> str:
1999+
return await self._get_block_hash(block_id)
2000+
2001+
async def _get_block_hash(self, block_id: int) -> str:
19902002
return (await self.rpc_request("chain_getBlockHash", [block_id]))["result"]
19912003

19922004
async def get_chain_head(self) -> str:
@@ -3230,6 +3242,28 @@ async def _handler(block_data: dict[str, Any]):
32303242
return await co
32313243

32323244

3245+
class DiskCachedAsyncSubstrateInterface(AsyncSubstrateInterface):
3246+
"""
3247+
Experimental new class that uses disk-caching in addition to memory-caching for the cached methods
3248+
"""
3249+
3250+
@async_sql_lru_cache(maxsize=512)
3251+
async def get_parent_block_hash(self, block_hash):
3252+
return await self._get_parent_block_hash(block_hash)
3253+
3254+
@async_sql_lru_cache(maxsize=16)
3255+
async def get_block_runtime_info(self, block_hash: str) -> dict:
3256+
return await self._get_block_runtime_info(block_hash)
3257+
3258+
@async_sql_lru_cache(maxsize=512)
3259+
async def get_block_runtime_version_for(self, block_hash: str):
3260+
return await self._get_block_runtime_version_for(block_hash)
3261+
3262+
@async_sql_lru_cache(maxsize=512)
3263+
async def get_block_hash(self, block_id: int) -> str:
3264+
return await self._get_block_hash(block_id)
3265+
3266+
32333267
async def get_async_substrate_interface(
32343268
url: str,
32353269
use_remote_preset: bool = False,

async_substrate_interface/sync_substrate.py

+6-7
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1+
import functools
12
import logging
23
import random
3-
from functools import lru_cache
44
from hashlib import blake2b
55
from typing import Optional, Union, Callable, Any
66

@@ -1407,7 +1407,7 @@ def convert_event_data(data):
14071407
events.append(convert_event_data(item))
14081408
return events
14091409

1410-
@lru_cache(maxsize=512) # large cache with small items
1410+
@functools.lru_cache(maxsize=512)
14111411
def get_parent_block_hash(self, block_hash):
14121412
block_header = self.rpc_request("chain_getHeader", [block_hash])
14131413

@@ -1420,15 +1420,15 @@ def get_parent_block_hash(self, block_hash):
14201420
return block_hash
14211421
return parent_block_hash
14221422

1423-
@lru_cache(maxsize=16) # small cache with large items
1423+
@functools.lru_cache(maxsize=16)
14241424
def get_block_runtime_info(self, block_hash: str) -> dict:
14251425
"""
14261426
Retrieve the runtime info of given block_hash
14271427
"""
14281428
response = self.rpc_request("state_getRuntimeVersion", [block_hash])
14291429
return response.get("result")
14301430

1431-
@lru_cache(maxsize=512) # large cache with small items
1431+
@functools.lru_cache(maxsize=512)
14321432
def get_block_runtime_version_for(self, block_hash: str):
14331433
"""
14341434
Retrieve the runtime version of the parent of a given block_hash
@@ -1656,8 +1656,7 @@ def _make_rpc_request(
16561656

16571657
return request_manager.get_results()
16581658

1659-
# TODO change this logic
1660-
@lru_cache(maxsize=512) # RPC methods are unlikely to change often
1659+
@functools.lru_cache(maxsize=512)
16611660
def supports_rpc_method(self, name: str) -> bool:
16621661
"""
16631662
Check if substrate RPC supports given method
@@ -1728,7 +1727,7 @@ def rpc_request(
17281727
else:
17291728
raise SubstrateRequestException(result[payload_id][0])
17301729

1731-
@lru_cache(maxsize=512) # block_id->block_hash does not change
1730+
@functools.lru_cache(maxsize=512)
17321731
def get_block_hash(self, block_id: int) -> str:
17331732
return self.rpc_request("chain_getBlockHash", [block_id])["result"]
17341733

+134
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,134 @@
1+
import functools
2+
import os
3+
import pickle
4+
import sqlite3
5+
import asyncstdlib as a
6+
7+
# Disk caching is enabled unless the NO_CACHE env var is set to "1".
USE_CACHE = os.getenv("NO_CACHE") != "1"

# Path of the sqlite database backing the disk cache. When caching is
# disabled we fall back to an in-memory database, so the sqlite code paths
# still work without touching the filesystem.
CACHE_LOCATION = (
    os.path.expanduser(
        os.getenv("CACHE_LOCATION", "~/.cache/async-substrate-interface")
    )
    if USE_CACHE
    else ":memory:"
)

# sqlite3.connect() creates the database file but NOT its parent
# directories — ensure they exist so the first connect cannot fail.
if USE_CACHE and (_cache_dir := os.path.dirname(CACHE_LOCATION)):
    os.makedirs(_cache_dir, exist_ok=True)
15+
16+
17+
def _get_table_name(func):
18+
"""Convert "ClassName.method_name" to "ClassName_method_name"""
19+
return func.__qualname__.replace(".", "_")
20+
21+
22+
def _check_if_local(chain: str) -> bool:
23+
return any([x in chain for x in ["127.0.0.1", "localhost", "0.0.0.0"]])
24+
25+
26+
def _create_table(c, conn, table_name):
27+
c.execute(
28+
f"""CREATE TABLE IF NOT EXISTS {table_name}
29+
(
30+
rowid INTEGER PRIMARY KEY AUTOINCREMENT,
31+
key BLOB,
32+
value BLOB,
33+
chain TEXT,
34+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
35+
);
36+
"""
37+
)
38+
c.execute(
39+
f"""CREATE TRIGGER IF NOT EXISTS prune_rows_trigger AFTER INSERT ON {table_name}
40+
BEGIN
41+
DELETE FROM {table_name}
42+
WHERE rowid IN (
43+
SELECT rowid FROM {table_name}
44+
ORDER BY created_at DESC
45+
LIMIT -1 OFFSET 500
46+
);
47+
END;"""
48+
)
49+
conn.commit()
50+
51+
52+
def _retrieve_from_cache(c, table_name, key, chain):
53+
try:
54+
c.execute(
55+
f"SELECT value FROM {table_name} WHERE key=? AND chain=?", (key, chain)
56+
)
57+
result = c.fetchone()
58+
if result is not None:
59+
return pickle.loads(result[0])
60+
except (pickle.PickleError, sqlite3.Error) as e:
61+
print(f"Cache error: {str(e)}")
62+
pass
63+
64+
65+
def _insert_into_cache(c, conn, table_name, key, result, chain):
66+
try:
67+
c.execute(
68+
f"INSERT OR REPLACE INTO {table_name} (key, value, chain) VALUES (?,?,?)",
69+
(key, pickle.dumps(result), chain),
70+
)
71+
conn.commit()
72+
except (pickle.PickleError, sqlite3.Error) as e:
73+
print(f"Cache error: {str(e)}")
74+
pass
75+
76+
77+
def sql_lru_cache(maxsize=None):
    """Decorator layering an in-memory LRU cache over an on-disk sqlite cache.

    Results are keyed by the pickled call arguments plus the instance's
    chain URL (``self.url``). Locally-hosted chains are never disk-cached,
    since a local node's data does not survive restarts.

    Args:
        maxsize: forwarded to ``functools.lru_cache`` for the memory layer.

    NOTE(review): the sqlite connection is created at decoration (import)
    time and reused by whichever thread later calls the wrapper; sqlite3
    connections are thread-bound by default — confirm single-threaded use.
    NOTE(review): ``lru_cache`` keys on ``self`` as well, so it keeps every
    instance alive for the cache's lifetime (ruff B019).
    """

    def decorator(func):
        conn = sqlite3.connect(CACHE_LOCATION)
        c = conn.cursor()
        table_name = _get_table_name(func)
        _create_table(c, conn, table_name)

        @functools.lru_cache(maxsize=maxsize)
        @functools.wraps(func)  # preserve the wrapped method's metadata
        def inner(self, *args, **kwargs):
            c = conn.cursor()
            key = pickle.dumps((args, kwargs))
            chain = self.url
            # Fixed: consult the disk cache only when caching is enabled AND
            # the chain is not local. The original condition
            # (`not local_chain or not USE_CACHE`) read from — and below,
            # wrote to — the database precisely when caching was turned off.
            if not (local_chain := _check_if_local(chain)) and USE_CACHE:
                result = _retrieve_from_cache(c, table_name, key, chain)
                if result is not None:
                    return result

            # If not in DB, call func and store in DB
            result = func(self, *args, **kwargs)

            if not local_chain and USE_CACHE:
                _insert_into_cache(c, conn, table_name, key, result, chain)

            return result

        return inner

    return decorator
105+
106+
107+
def async_sql_lru_cache(maxsize=None):
    """Async variant of ``sql_lru_cache`` for coroutine methods.

    Uses ``asyncstdlib.lru_cache`` for the in-memory layer and the same
    sqlite table scheme for the disk layer, keyed by pickled call args plus
    the instance's chain URL (``self.url``).

    Args:
        maxsize: forwarded to ``asyncstdlib.lru_cache``.

    NOTE(review): the sqlite connection is created at import time and then
    used from the event-loop thread; sqlite3 connections are thread-bound
    by default — confirm decoration and calls happen on the same thread.
    """

    def decorator(func):
        conn = sqlite3.connect(CACHE_LOCATION)
        c = conn.cursor()
        table_name = _get_table_name(func)
        _create_table(c, conn, table_name)

        @a.lru_cache(maxsize=maxsize)
        async def inner(self, *args, **kwargs):
            c = conn.cursor()
            key = pickle.dumps((args, kwargs))
            chain = self.url

            # Fixed: consult the disk cache only when caching is enabled AND
            # the chain is not local (the original
            # `not local_chain or not USE_CACHE` used the database exactly
            # when caching was disabled).
            if not (local_chain := _check_if_local(chain)) and USE_CACHE:
                result = _retrieve_from_cache(c, table_name, key, chain)
                if result is not None:
                    return result

            # If not in DB, call func and store in DB
            result = await func(self, *args, **kwargs)
            if not local_chain and USE_CACHE:
                _insert_into_cache(c, conn, table_name, key, result, chain)

            return result

        return inner

    return decorator

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "async-substrate-interface"
3-
version = "1.0.5"
3+
version = "1.0.6"
44
description = "Asyncio library for interacting with substrate. Mostly API-compatible with py-substrate-interface"
55
readme = "README.md"
66
license = { file = "LICENSE" }

0 commit comments

Comments
 (0)