XDEFI-5454: setup geckoterminal token source (#1)
* handle more chains and refactor

* check which remaining assets are no longer trending

* add some tests

* improve test coverage and change task schedule

* add regression test workflow

* change job hour
0xnabil authored Feb 29, 2024
1 parent 02b949b commit dc8b87a
Showing 11 changed files with 393 additions and 35 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/action.yml
@@ -2,7 +2,7 @@ name: run python script
 
 on:
   schedule:
-    - cron: '30 16 * * 5'
+    - cron: '0 14 * * *'
 
 jobs:
   build:
38 changes: 38 additions & 0 deletions .github/workflows/regression.yml
@@ -0,0 +1,38 @@
name: run tests and check coverage

on: push

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: checkout repo content
        uses: actions/checkout@v2 # checkout the repository content

      - name: setup python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10' # install the python version needed

      - name: install python packages
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: run pytest
        id: test_step
        run: pytest tests

      - name: Fail workflow if tests failed
        if: ${{ steps.test_step.outcome == 'failure' }}
        run: exit 1

      - name: run coverage
        id: test_cov_step
        run: |
          coverage run -m pytest tests
          coverage report -m | awk '/TOTAL/ {exit !($4 > 70)}' # fail when TOTAL coverage is not above 70%

      - name: Fail workflow if test coverage score is below threshold
        if: ${{ steps.test_cov_step.outcome == 'failure' }}
        run: exit 1
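A failing run step already fails the job on its own, so the explicit fail steps above act as an extra guard; the real coverage gate is the awk exit status on the TOTAL row. The same gate can be reproduced locally before pushing. A minimal sketch in Python, assuming coverage's standard report layout, where the fourth column of the TOTAL row is the percent covered:

import subprocess
import sys

# run the suite under coverage, then parse the textual report
subprocess.run(["coverage", "run", "-m", "pytest", "tests"], check=True)
report = subprocess.run(
    ["coverage", "report", "-m"], capture_output=True, text=True, check=True
).stdout

for line in report.splitlines():
    if line.startswith("TOTAL"):
        percent = float(line.split()[3].rstrip("%"))  # e.g. "81%" -> 81.0
        sys.exit(0 if percent > 70 else 1)  # same 70% threshold as the workflow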
4 changes: 3 additions & 1 deletion .gitignore
@@ -1 +1,3 @@
-__pycache__
+__pycache__
+output.log
+.coverage
9 changes: 9 additions & 0 deletions requirements.txt
@@ -2,7 +2,16 @@ aiohttp==3.9.3
 aiosignal==1.3.1
 async-timeout==4.0.3
 attrs==23.2.0
+coverage==7.4.3
+exceptiongroup==1.2.0
 frozenlist==1.4.1
 idna==3.6
+iniconfig==2.0.0
+mock==5.1.0
 multidict==6.0.5
+packaging==23.2
+pluggy==1.4.0
+pytest==8.0.2
+pytest-asyncio==0.23.5
+tomli==2.0.1
 yarl==1.9.4
18 changes: 18 additions & 0 deletions tests/test_helpers.py
@@ -0,0 +1,18 @@
import pytest
import sys
import os

# make the trending-pools modules importable from the tests directory
sys.path.append(os.path.dirname(os.path.dirname(__file__)) + "/trending-pools")

from helpers import parse_token_id


@pytest.mark.parametrize(
    "id, chain_and_address",
    [
        ("polygon_pos_0xCrazyShitcoin0", ("polygon_pos", "0xCrazyShitcoin0")),
        ("eth_0xCrazyShitcoin1", ("eth", "0xCrazyShitcoin1")),
        ("eth-goerli_0xCrazyShitcoin2", ("eth-goerli", "0xCrazyShitcoin2")),
    ],
)
def test_parse_token_id(id, chain_and_address):
    assert parse_token_id(id) == chain_and_address
251 changes: 251 additions & 0 deletions tests/test_pool_processor.py
@@ -0,0 +1,251 @@
import pytest
import sys
import os
from mock import patch

sys.path.append(os.path.dirname(os.path.dirname(__file__)) + "/trending-pools")

from pool_processor import parse_pool

minu_pool = {
    "id": "bsc_0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
    "attributes": {
        "address": "0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
        "name": "MINU / WBNB",
        "volume_usd": {
            "h1": "21475.753838700075959",
            "h24": "234671.45"
        },
        "reserve_in_usd": "394219.3869"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
                "type": "token"
            }
        }
    }
}

minu_pool_missing_volume = {
    "id": "bsc_0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
    "attributes": {
        "address": "0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
        "name": "MINU / WBNB",
        "volume_usd": {
            "h1": "21475.753838700075959"
        },
        "reserve_in_usd": "394219.3869"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
                "type": "token"
            }
        }
    }
}

minu_pool_low_volume = {
    "id": "bsc_0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
    "attributes": {
        "address": "0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
        "name": "MINU / WBNB",
        "volume_usd": {
            "h1": "2475.753838700075959",
            "h24": "2671.45"
        },
        "reserve_in_usd": "394219.3869"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
                "type": "token"
            }
        }
    }
}

minu_pool_low_liquidity = {
    "id": "bsc_0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
    "attributes": {
        "address": "0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
        "name": "MINU / WBNB",
        "volume_usd": {
            "h1": "2475.753838700075959",
            "h24": "2671.45"
        },
        "reserve_in_usd": "39499.3869"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
                "type": "token"
            }
        }
    }
}

base_asset_pool = {
    "id": "base_0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
    "attributes": {
        "address": "0x91d8d7dfcadc8e04195e75069e1316e01ba7f01c",
        "name": "MINU / WBNB",
        "volume_usd": {
            "h1": "21475.753838700075959",
            "h24": "234671.45"
        },
        "reserve_in_usd": "394219.3869"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "base_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
                "type": "token"
            }
        }
    }
}

sol_pool = {
    "id": "solana_BJKVBffxiaYgiqu7FeLbdVuhLrnXFFEr3KtH9SuboEdr",
    "attributes": {
        "address": "BJKVBffxiaYgiqu7FeLbdVuhLrnXFFEr3KtH9SuboEdr",
        "name": "SOL / MAI",
        "volume_usd": {
            "h1": "4486.75019495503720866724797605362203203000202612395689050435316312537258021",
            "h24": "589711.91"
        },
        "reserve_in_usd": "139550.7145"
    },
    "relationships": {
        "base_token": {
            "data": {
                "id": "solana_So11111111111111111111111111111111111111112",
                "type": "token"
            }
        },
        "quote_token": {
            "data": {
                "id": "solana_CdvCJJQHeU7qzpDWsRjR2E1Vp7GoQfBtg4joR9yprR5Z",
                "type": "token"
            }
        }
    }
}

@patch("helpers.get_request", return_value={
"data": {
"attributes": {
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"symbol": "MINU",
"decimals": 18,
}
}
}
)
@patch("helpers.load_existing_tokens", return_value=([], []))
@pytest.mark.asyncio
async def test_parse_pool(get_response, token_lists):
assert (await parse_pool(minu_pool)) == {
"id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"chain": "bsc",
"symbol": "MINU",
"decimals": 18,
"liquidity": "394219.3869",
"volume24": "234671.45"}


@pytest.mark.asyncio
async def test_parse_pool_non_supported_chain():
assert (await parse_pool(base_asset_pool)) is None


@patch("helpers.load_existing_tokens", return_value=(["bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4"], [
{
"id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"chain": "bsc",
"symbol": "MINU",
"decimals": 18,
"liquidity": "3000000",
"volume24": "200000"}
]))
@pytest.mark.asyncio
async def test_parse_existing_pool(token_lists):
assert (await parse_pool(minu_pool)) == {
"id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"chain": "bsc",
"symbol": "MINU",
"decimals": 18,
"liquidity": "394219.3869",
"volume24": "234671.45"
}


@patch("helpers.load_existing_tokens", return_value=(["bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4"], [
{
"id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"chain": "bsc",
"symbol": "MINU",
"decimals": 18,
"liquidity": "3000000",
"volume24": "200000"}
]))
@pytest.mark.asyncio
async def test_parse_existing_pool_with_low_volume(token_lists):
assert (await parse_pool(minu_pool_low_volume)) is None


@pytest.mark.asyncio
async def test_parse_low_liquidity_pool():
assert (await parse_pool(minu_pool_low_liquidity)) is None


@patch("helpers.get_request", return_value={
"data": {
"id": "solana_CdvCJJQHeU7qzpDWsRjR2E1Vp7GoQfBtg4joR9yprR5Z",
"type": "token",
"attributes": {
"address": "CdvCJJQHeU7qzpDWsRjR2E1Vp7GoQfBtg4joR9yprR5Z",
"name": "Multi AI",
"symbol": "MAI",
"decimals": 9,
}
}
}
)
@patch("helpers.load_existing_tokens", return_value=([], []))
@pytest.mark.asyncio
async def test_parse_pool_sol_as_base_token(get_response, token_lists):
assert (await parse_pool(sol_pool)) == {
"id": "solana_CdvCJJQHeU7qzpDWsRjR2E1Vp7GoQfBtg4joR9yprR5Z",
"address": "CdvCJJQHeU7qzpDWsRjR2E1Vp7GoQfBtg4joR9yprR5Z",
"chain": "solana",
"symbol": "MAI",
"decimals": 9,
"liquidity": "139550.7145",
"volume24": "589711.91"
}


@patch("helpers.load_existing_tokens", return_value=(["bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4"], [
{
"id": "bsc_0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"address": "0xf48f91df403976060cc05dbbf8a0901b09fdefd4",
"chain": "bsc",
"symbol": "MINU",
"decimals": 18,
"liquidity": "3000000",
"volume24": "200000"}
]))
@pytest.mark.asyncio
async def test_parse_existing_pool_with_error(token_lists):
assert (await parse_pool(minu_pool_missing_volume)) is None
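A subtlety these tests rely on: mock.patch replaces a name where it is looked up, not where it is defined, so patching "helpers.get_request" only intercepts calls that go through the helpers module attribute. pool_processor's own source is not shown in this diff; the sketch below illustrates the distinction with hypothetical call sites:

# where a test should patch depends on how the module under test imports:
import helpers

async def uses_module_attribute(url):
    # looked up on the helpers module at call time, so
    # @patch("helpers.get_request") intercepts this call
    return await helpers.get_request(url)

# by contrast, `from helpers import get_request` binds a local name in the
# importing module; patching "helpers.get_request" would not touch that
# reference, and the tests would need @patch("<that_module>.get_request")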
2 changes: 1 addition & 1 deletion trending-pools/assets.json

Large diffs are not rendered by default.

8 changes: 7 additions & 1 deletion trending-pools/constants.py
@@ -6,6 +6,12 @@
 
 TOKEN_URL = "/networks/{network}/tokens/{address}"
 
-supported_chains = ["solana", "eth", "bsc", "avax"]
+supported_chains = ["solana", "eth", "bsc", "avax", "optimism", "polygon_pos", "cro", "ftm", "aurora", "arbitrum", "xdai", "klaytn", "canto"]
 
 ASSETS_PATH = os.path.dirname(__file__) + "/assets.json"
+
+LIQUIDITY_THRESHOLD = 4e4
+
+VOLUME_THRESHOLD = 1.5e4
+
+FILE_LOGS = int(os.getenv("FILE_LOGS", "0"))  # cast: os.getenv returns a string, and "0" is truthy
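Read together with the tests above, these constants pin down the gating rules: pools on unsupported chains are skipped, pools below LIQUIDITY_THRESHOLD are dropped, and an already-tracked token whose 24h volume falls under VOLUME_THRESHOLD is treated as no longer trending. A minimal sketch of that logic, reconstructed from what the tests assert (parse_pool's real body is not part of this diff, so the function name and structure here are illustrative):

from constants import supported_chains, LIQUIDITY_THRESHOLD, VOLUME_THRESHOLD
from helpers import parse_token_id

def passes_filters(pool: dict, existing_ids: list) -> bool:
    """Illustrative reconstruction of the checks the tests exercise."""
    token_id = pool["relationships"]["base_token"]["data"]["id"]
    chain, _address = parse_token_id(token_id)
    if chain not in supported_chains:
        return False  # e.g. the "base" pool in test_parse_pool_non_supported_chain
    attributes = pool["attributes"]
    if float(attributes["reserve_in_usd"]) < LIQUIDITY_THRESHOLD:
        return False  # test_parse_low_liquidity_pool
    volume24 = attributes["volume_usd"].get("h24")
    if volume24 is None:
        return False  # test_parse_existing_pool_with_error
    if token_id in existing_ids and float(volume24) < VOLUME_THRESHOLD:
        return False  # test_parse_existing_pool_with_low_volume
    return True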
15 changes: 11 additions & 4 deletions trending-pools/helpers.py
@@ -2,12 +2,13 @@
 import asyncio
 import logging
 import json
-from typing import Dict, Optional
+from typing import Dict, Optional, Tuple
 from functools import lru_cache
-
-from constants import ASSETS_PATH
+from constants import ASSETS_PATH, FILE_LOGS
 
-log = logging.getLogger(__name__)
+log = logging.getLogger("globalLogger")
+if FILE_LOGS:
+    log.addHandler(logging.FileHandler("output.log", mode='w'))
 
 async def get_request(url, nbRetry=1, headers = None, debug=False):
     if nbRetry == 0:
@@ -52,3 +53,9 @@ def load_existing_tokens():
         return list(map(lambda x: x["id"], assets)), assets
     else:
         return [], []
+
+def parse_token_id(base_token_id: str) -> Tuple[str, str]:
+    parsed_id = base_token_id.split("_")
+    address = parsed_id[-1]
+    chain = "_".join(parsed_id[:-1])
+    return chain, address
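parse_token_id splits on every underscore and rejoins all but the last segment, so chain identifiers that themselves contain an underscore (such as polygon_pos) survive intact. For example (the addresses are shortened for illustration):

from helpers import parse_token_id

# only the final segment is the address; everything before it is the chain
assert parse_token_id("polygon_pos_0xabc") == ("polygon_pos", "0xabc")
assert parse_token_id("eth_0xdef") == ("eth", "0xdef")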