Skip to content

Commit

Permalink
merge with head
Browse files Browse the repository at this point in the history
  • Loading branch information
gofman8 committed Oct 27, 2023
2 parents cd36f90 + 2ef8985 commit 69ca0d6
Show file tree
Hide file tree
Showing 54 changed files with 1,313 additions and 307 deletions.
25 changes: 0 additions & 25 deletions .github/dependabot.yml

This file was deleted.

8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ a transaction that is pending to be sent to the blockchain.

## Index of contents

- [Docs](https://docs.gnosis-safe.io/backend/service-architecture)
- [Docs](https://docs.safe.global/safe-core-api/service-architecture)
- [Deploying the service](https://github.com/safe-global/safe-infrastructure)

## Setup for development
Expand Down Expand Up @@ -178,16 +178,16 @@ are deleted and indexing is restarted to the last `confirmed` block.

### If I add my chain to [safe-eth-py](https://github.com/safe-global/safe-eth-py/blob/master/gnosis/safe/addresses.py) will you support it?
No, for a chain to be supported we need to set up a dedicated infra for that network
and [have a proper RPC](https://docs.safe.global/learn/infrastructure/rpc-requirements)
and [have a proper RPC](https://docs.safe.global/safe-core-api/rpc-requirements)

### How can I interact with service?
Aside from using standard HTTP requests:
- [Safe API Kit](https://github.com/safe-global/safe-core-sdk/tree/main/packages/safe-service-client)
- [Safe{Core} API Kit](https://github.com/safe-global/safe-core-sdk/tree/main/packages/api-kit)
- [Safe-eth-py](https://github.com/safe-global/safe-eth-py)
- [Safe CLI](https://github.com/5afe/safe-cli): It has a `tx-service` mode to gather offchain signatures.

### What chains do you officially support?
https://docs.safe.global/learn/safe-core/safe-core-api/available-services
https://docs.safe.global/safe-core-api/available-services

### What means banned field in SafeContract model?
The `banned` field in the `SafeContract` model is used to prevent indexing of certain Safes that have an unsupported `MasterCopy` or unverified proxies that have issues during indexing. This field does not remove the banned Safe and indexing can be resumed once the issue has been resolved.
Expand Down
8 changes: 8 additions & 0 deletions config/gunicorn.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
"""
Store gunicorn variables in this file, so they can be read by Django
"""
import os

gunicorn_request_timeout = os.environ.get("WEB_WORKER_TIMEOUT", 60)
gunicorn_worker_connections = os.environ.get("WEB_WORKER_CONNECTIONS", 1000)
gunicorn_workers = os.environ.get("WEB_CONCURRENCY", 2)
34 changes: 32 additions & 2 deletions config/settings/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,12 @@
import environ
from corsheaders.defaults import default_headers as default_cors_headers

from ..gunicorn import (
gunicorn_request_timeout,
gunicorn_worker_connections,
gunicorn_workers,
)

ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent
APPS_DIR = ROOT_DIR / "safe_transaction_service"

Expand Down Expand Up @@ -47,6 +53,11 @@
# Enable analytics endpoints
ENABLE_ANALYTICS = env("ENABLE_ANALYTICS", default=False)

# GUNICORN
GUNICORN_REQUEST_TIMEOUT = gunicorn_request_timeout
GUNICORN_WORKER_CONNECTIONS = gunicorn_worker_connections
GUNICORN_WORKERS = gunicorn_workers

# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
Expand Down Expand Up @@ -212,8 +223,18 @@
CELERY_BROKER_URL = env("CELERY_BROKER_URL", default="django://")
# https://docs.celeryproject.org/en/stable/userguide/optimizing.html#broker-connection-pools
# https://docs.celeryq.dev/en/latest/userguide/optimizing.html#broker-connection-pools
CELERY_BROKER_POOL_LIMIT = env(
"CELERY_BROKER_POOL_LIMIT", default=env("CELERYD_CONCURRENCY", default=1000)
# Configured to 0 due to connection issues https://github.com/celery/celery/issues/4355
CELERY_BROKER_POOL_LIMIT = env.int("CELERY_BROKER_POOL_LIMIT", default=0)
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#broker-heartbeat
CELERY_BROKER_HEARTBEAT = env.int("CELERY_BROKER_HEARTBEAT", default=0)

# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-broker_connection_max_retries
CELERY_BROKER_CONNECTION_MAX_RETRIES = env.int(
"CELERY_BROKER_CONNECTION_MAX_RETRIES", default=0
)
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#broker-channel-error-retry
CELERY_BROKER_CHANNEL_ERROR_RETRY = env.bool(
"CELERY_BROKER_CHANNEL_ERROR_RETRY", default=True
)
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND", default="redis://")
Expand All @@ -234,6 +255,7 @@
CELERY_TASK_QUEUE_MAX_PRIORITY = 10
# https://docs.celeryproject.org/en/latest/userguide/configuration.html#broker-transport-options
CELERY_BROKER_TRANSPORT_OPTIONS = {}

# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-task_routes
CELERY_ROUTES = (
[
Expand Down Expand Up @@ -453,6 +475,9 @@
ETH_EVENTS_UPDATED_BLOCK_BEHIND = env.int(
"ETH_EVENTS_UPDATED_BLOCK_BEHIND", default=24 * 60 * 60 // 15
) # Number of blocks to consider an address 'almost updated'.
ETH_REORG_BLOCKS_BATCH = env.int(
"ETH_REORG_BLOCKS_BATCH", default=250
) # Number of blocks to be checked in the same batch for reorgs
ETH_REORG_BLOCKS = env.int(
"ETH_REORG_BLOCKS", default=200 if ETH_L2_NETWORK else 10
) # Number of blocks from the current block number needed to consider a block valid/stable
Expand Down Expand Up @@ -502,6 +527,11 @@
EVENTS_QUEUE_ASYNC_CONNECTION = env("EVENTS_QUEUE_ASYNC_CONNECTION", default=False)
EVENTS_QUEUE_EXCHANGE_NAME = env("EVENTS_QUEUE_EXCHANGE_NAME", default="amq.fanout")

# Cache
CACHE_ALL_TXS_VIEW = env.int(
"CACHE_ALL_TXS_VIEW", default=10 * 60
) # 10 minutes. 0 is disabled

# AWS S3 https://github.com/etianen/django-s3-storage
# ------------------------------------------------------------------------------
# AWS_QUERYSTRING_AUTH = False # Remove query parameter authentication from generated URLs
Expand Down
4 changes: 3 additions & 1 deletion docker/web/celery/scheduler/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@ fi
sleep 10

echo "==> $(date +%H:%M:%S) ==> Running Celery beat <=="
exec celery -C -A config.celery_app beat -S django_celery_beat.schedulers:DatabaseScheduler --loglevel $log_level
exec celery -C -A config.celery_app beat \
-S django_celery_beat.schedulers:DatabaseScheduler \
--loglevel $log_level
3 changes: 2 additions & 1 deletion docker/web/celery/worker/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,4 +33,5 @@ exec celery -C -A config.celery_app worker \
--concurrency=${TASK_CONCURRENCY} \
--max-memory-per-child=${MAX_MEMORY_PER_CHILD} \
--max-tasks-per-child=${MAX_TASKS_PER_CHILD} \
-Q "$WORKER_QUEUES"
--without-heartbeat --without-gossip \
--without-mingle -Q "$WORKER_QUEUES"
26 changes: 11 additions & 15 deletions gunicorn.conf.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
from config.gunicorn import (
gunicorn_request_timeout,
gunicorn_worker_connections,
gunicorn_workers,
)

access_logfile = "-"
error_logfile = "-"
max_requests = 20_000 # Restart a worker after it has processed a given number of requests (for memory leaks)
Expand All @@ -10,19 +16,9 @@
log_level = "info"
logger_class = "safe_transaction_service.utils.loggers.CustomGunicornLogger"
preload_app = False # Load application code before the worker processes are forked (problems with gevent patching)
timeout = (
60 # Worker will be restarted if it doesn't answer in more than configured seconds
)
worker_class = "gevent"
worker_connections = 2000


def post_fork(server, worker):
try:
from psycogreen.gevent import patch_psycopg
# For timeout to work with gevent, a custom GeventWorker needs to be used
timeout = gunicorn_request_timeout

worker.log.info("Making Psycopg2 Green")
patch_psycopg()
worker.log.info("Made Psycopg2 Green")
except ImportError:
worker.log.info("Psycopg2 not patched")
worker_class = "gunicorn_custom_workers.MyGeventWorker" # "gevent"
worker_connections = gunicorn_worker_connections
workers = gunicorn_workers
21 changes: 21 additions & 0 deletions gunicorn_custom_workers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import gevent
from gunicorn.workers.ggevent import GeventWorker
from psycogreen.gevent import patch_psycopg


class MyGeventWorker(GeventWorker):
    """
    Custom gevent worker that patches psycopg2 for cooperative (green) I/O
    and enforces a per-request timeout via ``gevent.Timeout``.

    NOTE(review): per the project's gunicorn config, the stock ``GeventWorker``
    does not enforce the configured ``timeout`` per request, hence this
    subclass — confirm against the gunicorn version in use.
    """

    def patch_psycopg2(self) -> None:
        # Make psycopg2 cooperative with gevent so blocking DB calls yield
        # to other greenlets instead of stalling the whole worker.
        patch_psycopg()
        self.log.info("Patched Psycopg2 for gevent")

    def patch(self) -> None:
        # Apply gunicorn's standard gevent monkey-patching first, then
        # patch psycopg2 on top of the already-green stdlib.
        super().patch()
        self.log.info("Patched all for gevent")
        self.patch_psycopg2()

    def handle_request(self, listener_name, req, sock, addr):
        # Bound a single request by the configured gunicorn ``timeout``;
        # on expiry only this request is aborted — the worker keeps running.
        try:
            with gevent.Timeout(self.cfg.timeout):
                super().handle_request(listener_name, req, sock, addr)
        except gevent.Timeout:
            self.log.error("TimeoutError on %s", req.path)
16 changes: 8 additions & 8 deletions requirements-test.txt
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
-r requirements.txt
coverage==7.2.7
django-stubs==4.2.1
coverage==7.3.1
django-stubs==4.2.4
django-test-migrations==1.3.0
factory-boy==3.2.1
faker==18.10.1
mypy==1.0.1
pytest==7.4.0
factory-boy==3.3.0
faker==19.6.1
mypy==1.5.1
pytest==7.4.2
pytest-celery==0.0.0
pytest-django==4.5.2
pytest-env==0.8.2
pytest-rerunfailures==11.1.2
pytest-env==1.0.1
pytest-rerunfailures==12.0
pytest-sugar==0.9.7
36 changes: 18 additions & 18 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,37 +1,37 @@
boto3==1.26.151
boto3==1.28.44
cachetools==5.3.1
celery==5.2.7
django==4.2.2
django-cache-memoize==0.1.10
celery==5.3.4
django==4.2.4
django-cache-memoize==0.2.0
django-celery-beat==2.5.0
django-cors-headers==4.0.0
django-cors-headers==4.2.0
django-db-geventpool==4.0.1
django-debug-toolbar
django-debug-toolbar-force
django-environ==0.10.0
django-environ==0.11.2
django-extensions==3.2.3
django-filter==23.2
django-filter==23.3
django-imagekit==4.1.0
django-model-utils==4.3.1
django-redis==5.2.0
django-redis==5.3.0
django-s3-storage==0.14.0
django-timezone-field==5.1
django-timezone-field==6.0.1
djangorestframework==3.14.0
djangorestframework-camel-case==1.4.2
docutils==0.20.1
drf-yasg[validation]==1.21.5
drf-yasg[validation]==1.21.7
firebase-admin==6.2.0
flower==1.2.0
gunicorn[gevent]==20.1.0
hexbytes==0.2.3
flower==2.0.1
gunicorn[gevent]==21.2.0
hexbytes==0.3.1
hiredis==2.2.3
packaging>=21.0
pika==1.3.2
pillow==9.5.0
pillow==10.0.1
psycogreen==1.0.2
psycopg2==2.9.6
redis==4.5.5
psycopg2==2.9.7
redis==5.0.0
requests==2.31.0
git+https://github.com/protofire/safe-eth-py.git@rsk#egg=safe-eth-py
#safe-eth-py[django]==5.5.0
web3==6.5.0
#safe-eth-py[django]==5.8.0
web3==6.9.0
2 changes: 1 addition & 1 deletion safe_transaction_service/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "4.21.2"
__version__ = "4.26.0"
__version_info__ = tuple(
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
Expand Down
8 changes: 8 additions & 0 deletions safe_transaction_service/contracts/tx_decoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,14 @@ class MultisendDecoded(TypedDict):

@cache
def get_db_tx_decoder() -> "DbTxDecoder":
"""
:return: Tx decoder with every ABI in the database loaded and indexed by function opcode
.. note::
Be careful when calling this function in a concurrent way, as if cache is not generated it will compute
the ``DbTxDecoder`` multiple times, and depending on the number of Contracts in the database it could
take a lot.
"""

def _get_db_tx_decoder() -> "DbTxDecoder":
return DbTxDecoder()

Expand Down
13 changes: 13 additions & 0 deletions safe_transaction_service/history/apps.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import sys

from django.apps import AppConfig


Expand All @@ -7,3 +9,14 @@ class HistoryConfig(AppConfig):

def ready(self):
from . import signals # noqa

for argument in sys.argv:
if "gunicorn" in argument: # pragma: no cover
# Just run this on production
# TODO Find a better way
from safe_transaction_service.contracts.tx_decoder import (
get_db_tx_decoder,
)

get_db_tx_decoder() # Build tx decoder cache
break
2 changes: 1 addition & 1 deletion safe_transaction_service/history/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def is_valid_unique_transfer_id(unique_transfer_id: str) -> bool:
:return: ``True`` for a valid ``unique_transfer_id``, ``False`` otherwise
"""
token_transfer_id_pattern = r"^(e)([a-fA-F0-9]{64})(\d+)"
internal_transfer_id_pattern = r"^(i)([a-fA-F0-9]{64})(\d+)(,\d+)*"
internal_transfer_id_pattern = r"^(i)([a-fA-F0-9]{64})(\d*)(,\d+)*"

return bool(
re.fullmatch(token_transfer_id_pattern, unique_transfer_id)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,15 @@
class Erc20EventsIndexerProvider:
def __new__(cls):
if not hasattr(cls, "instance"):
from django.conf import settings

cls.instance = Erc20EventsIndexer(
EthereumClient(settings.ETHEREUM_NODE_URL)
)
cls.instance = cls.get_new_instance()
return cls.instance

@classmethod
def get_new_instance(cls) -> "Erc20EventsIndexer":
from django.conf import settings

return Erc20EventsIndexer(EthereumClient(settings.ETHEREUM_NODE_URL))

@classmethod
def del_singleton(cls):
if hasattr(cls, "instance"):
Expand Down
11 changes: 8 additions & 3 deletions safe_transaction_service/history/indexers/ethereum_indexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from django.db.models import Min, QuerySet

from celery.exceptions import SoftTimeLimitExceeded
from requests import Timeout

from gnosis.eth import EthereumClient

Expand Down Expand Up @@ -393,7 +394,13 @@ def process_addresses(
to_block_number,
current_block_number=current_block_number,
)
except (FindRelevantElementsException, SoftTimeLimitExceeded) as e:
processed_elements = self.process_elements(elements)
except (
FindRelevantElementsException,
SoftTimeLimitExceeded,
Timeout,
ValueError,
) as e:
self.block_process_limit = 1 # Set back to the very minimum
logger.info(
"%s: block_process_limit set back to %d",
Expand All @@ -402,8 +409,6 @@ def process_addresses(
)
raise e

processed_elements = self.process_elements(elements)

if not self.update_monitored_addresses(
addresses, from_block_number, to_block_number
):
Expand Down
Loading

0 comments on commit 69ca0d6

Please sign in to comment.