Skip to content

Commit

Permalink
bring back dontfetch data structure
Browse files · Browse the repository at this point in the history
  • Loading branch information
charles-cooper committed Feb 15, 2024
1 parent af2a829 commit 0f79bb2
Showing 1 changed file with 41 additions and 13 deletions.
54 changes: 41 additions & 13 deletions boa/vm/fork.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import sys
from typing import Any, Type

from requests import HTTPError
Expand All @@ -12,6 +13,7 @@
from eth.db.account import AccountDB, keccak
from eth.db.backends.memory import MemoryDB
from eth.db.cache import CacheDB
from eth.db.journal import JournalDB
from eth.rlp.accounts import Account
from eth.vm.interrupt import MissingBytecode
from eth.vm.message import Message
Expand All @@ -28,6 +30,7 @@


_EMPTY = b"" # empty rlp stuff
_HAS_KEY = b"\x01" # could be anything


class CachingRPC(RPC):
Expand All @@ -39,6 +42,8 @@ def __init__(self, rpc: RPC, cache_file: str = DEFAULT_CACHE_DIR):
try:
from boa.util.leveldb import LevelDB

print("(using leveldb)", file=sys.stderr)

cache_file = os.path.expanduser(cache_file)
# use CacheDB as an additional layer over disk
# (ideally would use leveldb lru cache but it's not configurable
Expand Down Expand Up @@ -134,6 +139,8 @@ def __init__(self, *args, **kwargs2):
def __init__(self, rpc: CachingRPC, block_identifier: str, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)

self._dontfetch = JournalDB(MemoryDB())

self._rpc = rpc

if block_identifier not in _PREDEFINED_BLOCKS:
Expand Down Expand Up @@ -255,26 +262,47 @@ def get_code(self, address):
)
return to_bytes(ret)

def discard(self, checkpoint):
    """Revert state to *checkpoint*.

    Also rolls back the ``_dontfetch`` journal so the "slot is known
    locally" markers stay in sync with the reverted VM state.
    """
    super().discard(checkpoint)
    self._dontfetch.discard(checkpoint)

def commit(self, checkpoint):
    """Commit state changes since *checkpoint*.

    The ``_dontfetch`` journal is committed alongside the account DB so
    slot markers recorded since the checkpoint become permanent too.
    """
    super().commit(checkpoint)
    self._dontfetch.commit(checkpoint)

def record(self):
    """Start a new checkpoint and return it.

    The checkpoint id produced by the account DB is reused for the
    ``_dontfetch`` journal, so a single id drives commit/discard on both.
    """
    checkpoint = super().record()
    self._dontfetch.record(checkpoint)
    return checkpoint

def _helper_have_storage(self, address, slot, from_journal=True):
    """Return True when (address, slot) is already known locally,
    i.e. there is no need to fall back to an RPC fetch.
    """
    if from_journal:
        # journaled view: a slot written through this DB is tagged in
        # the _dontfetch journal with the sentinel _HAS_KEY
        tracker_key = self._get_storage_tracker_key(address, slot)
        return self._dontfetch.get(tracker_key) == _HAS_KEY

    # non-journal view: consult the locked (persisted) changes of the
    # per-account storage store directly
    locked = super()._get_address_store(address)._locked_changes
    return locked.get(int_to_big_endian(slot), _EMPTY) != _EMPTY

def get_storage(self, address, slot, from_journal=True):
    """Return the storage word at (address, slot).

    Checks local state first and only falls back to fetching the value
    over RPC (``eth_getStorageAt``) when the slot has never been seen
    locally.
    """
    # call super for address warming semantics
    val = super().get_storage(address, slot, from_journal)

    # check if we have the storage locally in the VM already
    # cf. AccountStorageDB.get()
    if self._helper_have_storage(address, slot, from_journal=from_journal):
        return val

    # slot unknown locally -- fetch it from the archive node at the
    # pinned block id
    addr = to_checksum_address(address)
    raw_val = self._rpc.fetch(
        "eth_getStorageAt", [addr, to_hex(slot), self._block_id]
    )
    return to_int(raw_val)
def set_storage(self, address, slot, value):
    """Write *value* to (address, slot), then tag the slot so later
    reads are served locally instead of being fetched over RPC.
    """
    super().set_storage(address, slot, value)
    # mark don't fetch
    marker = self._get_storage_tracker_key(address, slot)
    self._dontfetch[marker] = _HAS_KEY

def account_exists(self, address):
if super().account_exists(address):
Expand Down

0 comments on commit 0f79bb2

Please sign in to comment.