From 040a4d63a3bb518497eb1692d77c4e920f215243 Mon Sep 17 00:00:00 2001
From: coder1963
Date: Sun, 10 Nov 2024 17:54:07 +0530
Subject: [PATCH 1/3] chore: bump and update dependencies

---
 Makefile                           |  3 +++
 icon_contracts/config.py           | 21 ++++++++++----------
 icon_contracts/models/contracts.py | 32 +++++++++++++++++-------------
 requirements_api.txt               | 13 ++++++------
 requirements_dev.txt               |  4 ++--
 requirements_worker.txt            | 10 +++++-----
 6 files changed, 46 insertions(+), 37 deletions(-)

diff --git a/Makefile b/Makefile
index d698d07..f6efc63 100644
--- a/Makefile
+++ b/Makefile
@@ -38,5 +38,8 @@ ps: ## List all containers and running status
 postgres-console: ## Start postgres terminal
 	docker compose -f docker-compose.db.yml -f docker-compose.yml exec postgres psql -U postgres
 
+install: ## Install requirements
+	pip install -r requirements_api.txt -r requirements_worker.txt -r requirements_dev.txt
+
 help:
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-16s\033[0m %s\n", $$1, $$2}'

diff --git a/icon_contracts/config.py b/icon_contracts/config.py
index fd6a9a5..213ffad 100644
--- a/icon_contracts/config.py
+++ b/icon_contracts/config.py
@@ -1,6 +1,6 @@
 import os
 
-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
 
 class Settings(BaseSettings):
@@ -38,9 +38,9 @@ class Settings(BaseSettings):
     LOG_FORMAT: str = "string"
 
     # ICON Nodes
-    ICON_NODE_URL = "https://api.icon.community/api/v3"
+    ICON_NODE_URL: str = "https://api.icon.community/api/v3"
     # ICON_NODE_URL: str = "https://berlin.net.solidwallet.io/api/v3"
-    BACKUP_ICON_NODE_URL = "https://ctz.solidwallet.io/api/v3"
+    BACKUP_ICON_NODE_URL: str = "https://ctz.solidwallet.io/api/v3"
 
     COMMUNITY_API_ENDPOINT: str = "https://tracker.icon.community"
 
@@ -50,7 +50,7 @@ class Settings(BaseSettings):
     # KAFKA_GROUP_ID: str = "contracts"
     CONSUMER_IS_TAIL: bool = False
-    JOB_ID: str = None
+    JOB_ID: str | None = None
     CONSUMER_GROUP: str = "contracts"
     # Change this to "earliest" + CONSUMER_GROUP to trigger a manual backfill
     CONSUMER_AUTO_OFFSET_RESET: str = "latest"
 
@@ -68,9 +68,9 @@ class Settings(BaseSettings):
     POSTGRES_DATABASE: str = "postgres"
 
     # Contract S3 Upload
-    CONTRACTS_S3_AWS_ACCESS_KEY_ID: str = None
-    CONTRACTS_S3_AWS_SECRET_ACCESS_KEY: str = None
-    CONTRACTS_S3_BUCKET: str = None
+    CONTRACTS_S3_AWS_ACCESS_KEY_ID: str | None = None
+    CONTRACTS_S3_AWS_SECRET_ACCESS_KEY: str | None = None
+    CONTRACTS_S3_BUCKET: str | None = None
 
     # Endpoints
     MAX_PAGE_SIZE: int = 100
@@ -98,9 +98,10 @@ class Settings(BaseSettings):
     governance_address: str = "cx0000000000000000000000000000000000000000"
     one_address: str = "cx0000000000000000000000000000000000000001"
 
-    class Config:
-        case_sensitive = False
-
+
+    model_config = SettingsConfigDict(
+        case_sensitive=False,
+    )
 
 if os.environ.get("ENV_FILE", False):
     settings = Settings(_env_file=os.environ.get("ENV_FILE"))
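
[Note on the config.py hunks above]
These hunks migrate the settings class from pydantic v1's built-in
BaseSettings to the separate pydantic-settings package (pydantic v2): the
inner `class Config:` becomes `model_config = SettingsConfigDict(...)`, and
every field gains an explicit annotation, since v2 only treats annotated
assignments as fields. A minimal sketch of the resulting pattern, assuming
pydantic-settings >= 2.0 is installed; the two fields shown are just an
illustrative subset of the real Settings class:

    import os

    from pydantic_settings import BaseSettings, SettingsConfigDict


    class Settings(BaseSettings):
        # v2 requires the annotation; a bare `ICON_NODE_URL = "..."`
        # assignment is no longer picked up as a settings field.
        ICON_NODE_URL: str = "https://api.icon.community/api/v3"
        # Optional fields are annotated as such rather than declaring
        # `str` with a None default.
        JOB_ID: str | None = None

        # Replaces the v1 `class Config:` inner class.
        model_config = SettingsConfigDict(case_sensitive=False)


    # Values come from the environment; matching is case-insensitive here.
    os.environ["icon_node_url"] = "https://ctz.solidwallet.io/api/v3"
    print(Settings().ICON_NODE_URL)  # -> https://ctz.solidwallet.io/api/v3
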
diff --git a/icon_contracts/models/contracts.py b/icon_contracts/models/contracts.py
index 3b8ce72..90186b8 100644
--- a/icon_contracts/models/contracts.py
+++ b/icon_contracts/models/contracts.py
@@ -25,40 +25,44 @@ class Contract(SQLModel, table=True):
     address: str = Field(primary_key=True)
     name: Optional[str] = Field(None, index=False)
-    symbol: str = Field(None, index=False)
-    decimals: str = Field(None, index=False)
+    symbol: Optional[str] = Field(None, index=False)
+    decimals: Optional[str] = Field(None, index=False)
 
-    contract_type: str = Field(None, index=True, description="One of python / java")
-    token_standard: str = Field(None, index=True, description="One of Contract, IRC2")
+    contract_type: Optional[str] = Field(None, index=True, description="One of python / java")
+    token_standard: Optional[str] = Field(None, index=True, description="One of Contract, IRC2")
 
     email: Optional[str] = Field(None, index=False)
     website: Optional[str] = Field(None, index=False)
 
     last_updated_block: Optional[int] = Field(None, index=True)
-    last_updated_timestamp: Optional[int] = Field(None, sa_column=Column(BIGINT), index=True)
+    last_updated_timestamp: Optional[int] = Field(
+        None, sa_column=Column(BIGINT, index=True)
+    )
     created_block: Optional[int] = Field(None, index=True)
-    created_timestamp: Optional[int] = Field(None, sa_column=Column(BIGINT), index=True)
+    created_timestamp: Optional[int] = Field(
+        None, sa_column=Column(BIGINT, index=True)
+    )
     creation_hash: Optional[str] = Field(None, index=False)
     owner_address: Optional[str] = Field(None, index=False)
 
     current_version: Optional[str] = Field(None, index=False)
 
-    abi: List[dict] = Field(None, index=False, sa_column=Column(JSON))
+    abi: List[dict] = Field(None, sa_column=Column(JSON, index=False))
 
-    source_code_link: str = Field(None, index=False)
-    verified_source_code_link: str = Field(None, index=False)
-    verified: bool = Field(False, index=True)
-    revision_number: int = Field(
+    source_code_link: Optional[str] = Field(None, index=False)
+    verified_source_code_link: Optional[str] = Field(None, index=False)
+    verified: Optional[bool] = Field(False, index=True)
+    revision_number: Optional[int] = Field(
         -1,
         index=False,
         description="Out of order ID for zipped up source code in s3 "
         "/bucket/[address]_[revision_number].zip",
     )
 
-    audit_tx_hash: str = Field(None, index=False)
-    code_hash: str = Field(None, index=False)
-    deploy_tx_hash: str = Field(None, index=False)
+    audit_tx_hash: Optional[str] = Field(None, index=False)
+    code_hash: Optional[str] = Field(None, index=False)
+    deploy_tx_hash: Optional[str] = Field(None, index=False)
 
     status: Optional[str] = Field(
         None, index=True, description="Field to inform audit status of 1.0 contracts."
     )

diff --git a/requirements_api.txt b/requirements_api.txt
index b105260..704e953 100644
--- a/requirements_api.txt
+++ b/requirements_api.txt
@@ -1,9 +1,10 @@
 loguru==0.5.3
 prometheus_client==0.11.0
-asyncpg==0.24.0
-fastapi==0.68.1
-sqlmodel==0.0.4
-uvicorn==0.15.0
+asyncpg
+fastapi
+sqlmodel
+uvicorn
 requests==2.26.0
-fastapi-health==0.4.0
-brotli-asgi==1.1.0
\ No newline at end of file
+fastapi-health
+brotli-asgi==1.1.0
+pydantic_settings
\ No newline at end of file
diff --git a/requirements_dev.txt b/requirements_dev.txt
index 04a5524..2f1fe11 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -2,5 +2,5 @@
 pytest
 pytest-mock
 pytest-cov
-docker-compose
-grpcio-tools==1.40.0
+# docker-compose
+grpcio-tools
diff --git a/requirements_worker.txt b/requirements_worker.txt
index 4ad7417..1dd7430 100644
--- a/requirements_worker.txt
+++ b/requirements_worker.txt
@@ -6,12 +6,12 @@ requests==2.26.0
 
 loguru==0.5.3
 prometheus_client==0.11.0
-grpcio==1.40.0
+grpcio
 # Temporary breaking change
-protobuf==3.20.1
+protobuf==3.20.*
 confluent_kafka
 psycopg2-binary
 
-alembic==1.7.1
-asyncpg==0.24.0
-sqlmodel==0.0.4
+alembic
+asyncpg
+sqlmodel
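
[Note on the models/contracts.py hunks above]
The timestamp hunks move `index=True` from Field() into the Column itself:
once `sa_column` is supplied, SQLModel delegates the whole column definition
to SQLAlchemy, so a Field-level `index` flag on the same field is not
applied (newer SQLModel releases reject the combination outright). A small
sketch of the pattern, assuming sqlmodel and sqlalchemy are installed;
`Example` is an illustrative model, not one from this repo:

    from typing import Optional

    from sqlalchemy import BIGINT, Column
    from sqlmodel import Field, SQLModel


    class Example(SQLModel, table=True):
        id: Optional[int] = Field(default=None, primary_key=True)
        # The index is declared on the Column, because sa_column fully
        # defines this column on the SQLAlchemy side:
        created_timestamp: Optional[int] = Field(
            default=None, sa_column=Column(BIGINT, index=True)
        )
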
From 68872cb845c7e412095e9cee2fe421b311100e54 Mon Sep 17 00:00:00 2001
From: coder1963
Date: Sat, 30 Nov 2024 12:37:51 +0530
Subject: [PATCH 2/3] chore: update to the correct select() usage for SQLAlchemy 2.x

---
 icon_contracts/api/v1/endpoints/contracts.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/icon_contracts/api/v1/endpoints/contracts.py b/icon_contracts/api/v1/endpoints/contracts.py
index a4a09b0..9aef271 100644
--- a/icon_contracts/api/v1/endpoints/contracts.py
+++ b/icon_contracts/api/v1/endpoints/contracts.py
@@ -26,7 +26,7 @@ async def get_contracts(
 ) -> List[Contract]:
     """Return list of contracts"""
     query = select(Contract).offset(skip).limit(limit)
-    query_count = select([func.count(Contract.address)])
+    query_count = select(func.count(Contract.address))
 
     if contract_type:
         query = query.where(Contract.contract_type == contract_type)
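
[Note on the select() change above]
SQLAlchemy 2.0 removed the legacy 1.x calling style in which select() took a
list of columns; columns and entities are now passed positionally. A short
before/after sketch (Contract is the repo's model; session setup is elided):

    from sqlalchemy import func, select

    from icon_contracts.models.contracts import Contract

    # 1.x legacy style, rejected by SQLAlchemy 2.0:
    #   query_count = select([func.count(Contract.address)])

    # 2.0 style, as used in the patch:
    query_count = select(func.count(Contract.address))

    # Executing it yields a single scalar:
    #   total = session.execute(query_count).scalar_one()
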
From 220544527862a134fa32eedc0bef0cc2b83895ec Mon Sep 17 00:00:00 2001
From: coder1963
Date: Sat, 30 Nov 2024 12:49:49 +0530
Subject: [PATCH 3/3] chore: fix failing tests by wrapping textual queries in
 sqlalchemy's text()

---
 icon_contracts/workers/kafka.py                 | 3 ++-
 tests/integration/worker/test_partition_init.py | 7 ++++---
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/icon_contracts/workers/kafka.py b/icon_contracts/workers/kafka.py
index e9fee19..6f7f105 100644
--- a/icon_contracts/workers/kafka.py
+++ b/icon_contracts/workers/kafka.py
@@ -9,6 +9,7 @@
 # from confluent_kafka.serialization import StringSerializer
 from loguru import logger
 from pydantic import BaseModel
+from sqlalchemy import text
 
 from icon_contracts.config import settings
 
@@ -25,7 +26,7 @@ def get_current_offset(session):
     output = {}
     while True:
         logger.info(f"Getting kafka job with job_id = {settings.JOB_ID}")
-        sql = f"select * from kafka_jobs WHERE job_id='{settings.JOB_ID}';"
+        sql = text(f"select * from kafka_jobs WHERE job_id='{settings.JOB_ID}';")
         result = session.execute(sql).fetchall()
         session.commit()
 
diff --git a/tests/integration/worker/test_partition_init.py b/tests/integration/worker/test_partition_init.py
index f38cdec..8ea3a31 100644
--- a/tests/integration/worker/test_partition_init.py
+++ b/tests/integration/worker/test_partition_init.py
@@ -1,4 +1,5 @@
 import pytest
+from sqlalchemy import text
 
 from icon_contracts.config import settings
 from icon_contracts.workers.kafka import get_current_offset
@@ -10,17 +11,17 @@ def backfill_job(db):
 
     def f(job_id):
         with db as session:
-            sql = "DROP TABLE IF EXISTS kafka_jobs;"
+            sql = text("DROP TABLE IF EXISTS kafka_jobs;")
             session.execute(sql)
             session.commit()
 
-            sql = "CREATE TABLE IF NOT EXISTS kafka_jobs (job_id varchar, worker_group varchar, topic varchar, partition bigint, stop_offset bigint, PRIMARY KEY (job_id, worker_group, topic, partition));"
+            sql = text("CREATE TABLE IF NOT EXISTS kafka_jobs (job_id varchar, worker_group varchar, topic varchar, partition bigint, stop_offset bigint, PRIMARY KEY (job_id, worker_group, topic, partition));")
             session.execute(sql)
             session.commit()
 
             num_msgs = 1000
             for i in range(0, 12):
-                sql = (
+                sql = text(
                     f"INSERT INTO kafka_jobs (job_id, worker_group, topic, partition, stop_offset) VALUES "
                     f"('{job_id}','{settings.CONSUMER_GROUP}-{job_id}',"
                     f"'{settings.CONSUMER_TOPIC_BLOCKS}','{i}','{num_msgs}');"
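
[Note on the text() changes above]
SQLAlchemy 2.0 no longer coerces plain strings passed to session.execute(),
so wrapping raw SQL in text() is required. The queries here still splice
values in with f-strings, though; a possible follow-up (not part of these
patches) is to bind them as parameters, which text() supports directly.
`get_job` below is illustrative, not a function from this repo:

    from sqlalchemy import text


    def get_job(session, job_id):
        # :job_id is passed to the driver as a real bind parameter, so the
        # value needs no quoting and cannot break out of the query.
        sql = text("SELECT * FROM kafka_jobs WHERE job_id = :job_id")
        return session.execute(sql, {"job_id": job_id}).fetchall()
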