From f7161a33e6179c42983dbe5d45a2034ea3281dd6 Mon Sep 17 00:00:00 2001 From: smk762 Date: Fri, 7 Jun 2024 14:04:24 +0200 Subject: [PATCH] print( -> logger.info( --- alerts/client.py | 15 +- alerts/config.py | 5 +- alerts/logger.py | 390 ++++++++++++++++++++ alerts/models.py | 5 +- code/scripts/align_ntx_scores.py | 5 +- code/scripts/archive_season_data.py | 5 +- code/scripts/build_prices_dict.py | 5 +- code/scripts/convert_seednodes.py | 3 +- code/scripts/cron_reward_testnet.py | 27 +- code/scripts/cron_update_addresses_table.py | 3 +- code/scripts/cron_update_mined_tables.py | 3 +- code/scripts/cron_update_social_notaries.py | 3 +- code/scripts/generate_raw_transaction.py | 9 +- code/scripts/generate_sendmany.py | 5 +- code/scripts/get_mined_by_year.py | 5 +- code/scripts/get_mining_stats.py | 7 +- code/scripts/lib_api.py | 7 +- code/scripts/lib_coins.py | 9 +- code/scripts/lib_color.py | 27 +- code/scripts/lib_const.py | 2 +- code/scripts/lib_crypto.py | 3 +- code/scripts/lib_dpow_const.py | 3 +- code/scripts/lib_electrum.py | 9 +- code/scripts/lib_epochs.py | 5 +- code/scripts/lib_github.py | 11 +- code/scripts/lib_mining.py | 19 +- code/scripts/lib_ntx.py | 3 +- code/scripts/lib_query.py | 5 +- code/scripts/lib_query_ntx.py | 27 +- code/scripts/lib_rpc.py | 9 +- code/scripts/lib_tests.py | 3 +- code/scripts/lib_update.py | 9 +- code/scripts/lib_update_ntx.py | 9 +- code/scripts/lib_urls.py | 3 +- code/scripts/lib_vote.py | 13 +- code/scripts/lib_wallet.py | 3 +- code/scripts/scan_mining.py | 3 +- code/scripts/scan_vote_snapshot.py | 7 +- code/scripts/validate_tables.py | 9 +- code/scripts/verify_other_nn_btc_tx.py | 13 +- code/scripts/verify_other_nn_ltc_tx.py | 13 +- 41 files changed, 575 insertions(+), 144 deletions(-) create mode 100644 alerts/logger.py diff --git a/alerts/client.py b/alerts/client.py index 31fea063..2e1f91a5 100644 --- a/alerts/client.py +++ b/alerts/client.py @@ -4,6 +4,7 @@ import asyncio from config import ALERT_INTERVAL, DISCORD_CHANNEL, DISCORD_TOKEN from models import NotaryMonitor +from logger import logger class MyClient(discord.Client): def __init__(self, *args, **kwargs): @@ -15,15 +16,15 @@ async def setup_hook(self) -> None: self.bg_task = self.loop.create_task(self.process_alerts()) async def on_ready(self): - print(f'Logged in as {self.user} (ID: {self.user.id})') - print('------') + logger.info(f'Logged in as {self.user} (ID: {self.user.id})') + logger.info('------') async def process_alerts(self): await self.wait_until_ready() - print('Logged in as') - print(self.user.name) - print(self.user.id) - print('------') + logger.info('Logged in as') + logger.info(self.user.name) + logger.info(self.user.id) + logger.info('------') channel = self.get_channel(DISCORD_CHANNEL) while not self.is_closed(): @@ -33,7 +34,7 @@ async def process_alerts(self): msg = self.sauron.alert_slow_miners() if msg != "": # await self.channel.send(msg) - print(msg) + logger.info(msg) break if loop > 12: diff --git a/alerts/config.py b/alerts/config.py index 317e5c1e..76a5f27d 100644 --- a/alerts/config.py +++ b/alerts/config.py @@ -1,4 +1,5 @@ import os +from logger import logger from dotenv import load_dotenv load_dotenv() @@ -6,8 +7,8 @@ try: DISCORD_CHANNEL = int(os.getenv('DISCORD_CHANNEL')) except: - print("You need to add 'DISCORD_CHANNEL' to .env file") + logger.info("You need to add 'DISCORD_CHANNEL' to .env file") DISCORD_TOKEN = os.getenv('DISCORD_TOKEN') if DISCORD_TOKEN is None: - print("You need to add 'DISCORD_TOKEN' to .env file") \ No newline at end of file + 
logger.info("You need to add 'DISCORD_TOKEN' to .env file") \ No newline at end of file diff --git a/alerts/logger.py b/alerts/logger.py new file mode 100644 index 00000000..d9ad0dac --- /dev/null +++ b/alerts/logger.py @@ -0,0 +1,390 @@ +#!/usr/bin/env python3.12 +from datetime import datetime, timezone +from os.path import basename, dirname, abspath +import logging +import functools + +PROJECT_ROOT_PATH = dirname(dirname(abspath(__file__))) + +class CustomFormatter(logging.Formatter): + white = "\x1b[m" + italic_white = "\x1b[3m" + underline_white = "\x1b[4m" + + muted = "\x1b[38;2;2;20;5m" + debug = "\x1b[38;50;2;2;5m" + black = "\x1b[30m" + lightgrey = "\x1b[37m" + grey = "\x1b[38;20m" + midgrey = "\x1b[90m" + + gold = "\x1b[33m" + yellow = "\x1b[93m" + yellow2 = "\x1b[33;20m" + yellow3 = "\x1b[33;1m" + lightyellow = "\x1b[38;2;250;250;150m" + + green = "\x1b[32m" + mintgreen = "\x1b[38;2;150;250;150m" + lightgreen = "\x1b[92m" + othergreen = "\x1b[32;1m" + drabgreen = "\x1b[38;2;150;200;150m" + + skyblue = "\x1b[38;2;150;250;250m" + iceblue = "\x1b[38;2;59;142;200m" + blue = "\x1b[34m" + magenta = "\x1b[35m" + purple = "\x1b[38;2;150;150;250m" + cyan = "\x1b[36m" + + lightblue = "\x1b[96m" + lightcyan = "\x1b[96m" + + pink = "\x1b[95m" + lightred = "\x1b[91m" + red = "\x1b[31;20m" + red2 = "\x1b[31m" + bold_red = "\x1b[31;1m" + + table = "\x1b[37m" + status = "\x1b[94m" + debug = "\x1b[30;1m" + reset = "\x1b[0m" + + format = ( + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + ) + datefmt = "%d-%b-%y %H:%M:%S" + + FORMATS = { + logging.DEBUG: debug + format + reset, + logging.INFO: lightgreen + format + reset, + logging.WARNING: red + format + reset, + logging.ERROR: lightred + format + reset, + logging.CRITICAL: bold_red + format + reset, + } + + def format(self, record): + if record.levelname == "STOPWATCH": + log_fmt = ( + self.pink + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "PAIR": + # Blue for lib class + log_fmt = ( + self.skyblue + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + ) + elif record.levelname == "DEXRPC": + # Blue for lib class + log_fmt = ( + self.iceblue + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "SOURCED": + # Blue for lib class + log_fmt = ( + self.blue + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "QUERY": + # Yellow for incoming data + log_fmt = ( + self.lightyellow + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "REQUEST": + # Yellow for incoming data + log_fmt = ( + self.gold + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "LOOP": + # Purple for cache loops + log_fmt = ( + self.purple + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "CALC": + # Cyan for data processing + log_fmt = ( + self.lightcyan + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "MERGE": + # Cyan for data processing + log_fmt = ( + self.cyan + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset 
+ ) + elif record.levelname == "CACHED": + # Green for data storage + log_fmt = ( + self.drabgreen + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "SAVED": + # Green for data storage + log_fmt = ( + self.mintgreen + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "UPDATED": + # Green for data storage + log_fmt = ( + self.lightgreen + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + ) + elif record.levelname == "MUTED": + log_fmt = ( + self.muted + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + elif record.levelname == "DEBUG": + log_fmt = ( + self.debug + + "[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s (%(filename)s:%(lineno)d)" + + self.reset + ) + else: + log_fmt = self.FORMATS.get(record.levelno) + formatter = logging.Formatter(log_fmt, datefmt="%d-%b-%y %H:%M:%S") + return formatter.format(record) + + +def addLoggingLevel(levelName, levelNum, methodName=None): + # From https://stackoverflow.com/questions/2183233/ + # how-to-add-a-custom-loglevel-to-pythons-logging-facility/ + + if not methodName: + methodName = levelName.lower() + + if hasattr(logging, levelName): + raise AttributeError("{} already defined in logging module".format(levelName)) + if hasattr(logging, methodName): + raise AttributeError("{} already defined in logging module".format(methodName)) + if hasattr(logging.getLoggerClass(), methodName): + raise AttributeError("{} already defined in logger class".format(methodName)) + + def logForLevel(self, message, *args, **kwargs): + if self.isEnabledFor(levelNum): + self._log(levelNum, message, args, **kwargs) + + def logToRoot(message, *args, **kwargs): + logging.log(levelNum, message, *args, **kwargs) + + logging.addLevelName(levelNum, levelName) + setattr(logging, levelName, levelNum) + setattr(logging.getLoggerClass(), methodName, logForLevel) + setattr(logging, methodName, logToRoot) + + +logger = logging.getLogger("defi-stats") +# create console handler with a higher log level +handler = logging.StreamHandler() +handler.setFormatter(CustomFormatter()) +logger.addHandler(handler) + + +addLoggingLevel("SOURCED", logging.DEBUG + 14) +logger.setLevel("SOURCED") + + +addLoggingLevel("SAVED", logging.DEBUG + 13) +logger.setLevel("SAVED") + + +addLoggingLevel("CACHED", logging.DEBUG + 12) +logger.setLevel("CACHED") + + +addLoggingLevel("PAIR", logging.DEBUG + 11) +logger.setLevel("PAIR") + + +addLoggingLevel("MERGE", logging.DEBUG + 9) +logger.setLevel("MERGE") + +# Shows cache updates +addLoggingLevel("UPDATED", logging.DEBUG + 8) +logger.setLevel("UPDATED") + +# Shows database req/resp +addLoggingLevel("QUERY", logging.DEBUG + 7) +logger.setLevel("QUERY") + +# Shows dex api req/resp +addLoggingLevel("DEXRPC", logging.DEBUG + 6) +logger.setLevel("DEXRPC") + +# Shows cache loop updates +addLoggingLevel("LOOP", logging.DEBUG + 5) +logger.setLevel("LOOP") + +# Shows cache loop updates +addLoggingLevel("CALC", logging.DEBUG + 4) +logger.setLevel("CALC") + +# Shows generally ignorable errors, e.g. 
CoinConfigNotFound +addLoggingLevel("MUTED", logging.DEBUG - 1) +logger.setLevel("MUTED") + +# Shows cache loop updates +addLoggingLevel("REQUEST", logging.DEBUG + 2) +logger.setLevel("REQUEST") + + +def send_log(loglevel, msg): + match loglevel: + case "info": + logger.info(f" {msg}") + case "muted": + pass + case "saved": + logger.saved(f" {msg}") + case "merge": + logger.merge(f" {msg}") + case "updated": + logger.updated(f"{msg}") + case "calc": + logger.calc(f" {msg}") + case "warning": + logger.warning(f"{msg}") + case "error": + logger.error(f" {msg}") + case "debug": + logger.debug(f" {msg}") + case "loop": + logger.loop(f" {msg}") + case "pair": + logger.pair(f" {msg}") + case "query": + logger.query(f" {msg}") + case "sourced": + logger.sourced(f"{msg}") + case "request": + logger.request(f"{msg}") + case "cached": + logger.cached(f" {msg}") + case _: + logger.debug(f" {msg}") + + +class StopWatch: + def __init__(self, start_time, trace, loglevel="debug", msg="") -> None: + self.start_time = start_time + self.msg = msg + self.trace = trace + self.loglevel = loglevel + self.get_stopwatch() + + def get_stopwatch(self): + duration = int(datetime.now(timezone.utc).timestamp()) - int(self.start_time) + if not isinstance(self.msg, str): + self.msg = str(self.msg) + lineno = self.trace["lineno"] + filename = self.trace["file"] + func = self.trace["function"] + if PROJECT_ROOT_PATH in self.msg: + self.msg = self.msg.replace(f"{PROJECT_ROOT_PATH}/", "") + self.msg = f"{duration:>2} sec | {func:<20} | {str(self.msg):<80} " + self.msg += f"| {basename(filename)}:{lineno}" + send_log(loglevel=self.loglevel, msg=self.msg) + + +def get_trace(func, error=None): + msg = { + "function": func.__name__, + "file": func.__code__.co_filename, + "lineno": func.__code__.co_firstlineno, + "vars": func.__code__.co_varnames, + } + if error is not None: + msg.update({"error": error}) + return msg + + +# Returns console colors for customising +def show_pallete(): + logger.info("info") + logger.debug("debug") + logger.warning("warning") + logger.error("error") + logger.critical("critical") + logger.updated("updated") + logger.merge("merge") + logger.saved("saved") + logger.calc("calc") + logger.dexrpc("dexrpc") + logger.loop("loop") + logger.muted("muted") + logger.query("query") + logger.request("request") + logger.cached("cached") + + +# A decorator for returning the runtime of functions +def timed(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + start_time = int(datetime.now(timezone.utc).timestamp()) + duration = int(datetime.now(timezone.utc).timestamp()) - start_time + trace = get_trace(func) + msg = "<<< no msg provided >>>" + try: + result = func(*args, **kwargs) + except Exception as e: + ignore_until = 0 + loglevel = "error" + if isinstance(e, ValueError): + # Custom logic here + pass + msg = f"{type(e)}: {e}" + StopWatch(start_time, trace=trace, loglevel=loglevel, msg=msg) + else: + send = False + msg = "" + ignore_until = 0 + loglevel = "info" + if isinstance(result, dict): + if "loglevel" in result: + loglevel = result["loglevel"] + send = True + else: + # if not using `default.result` + return result + if "message" in result: + msg = result["message"] + send = True + if "ignore_until" in result: + ignore_until = result["ignore_until"] + send = True + if duration >= ignore_until and send: + StopWatch(start_time, trace=trace, loglevel=loglevel, msg=msg) + # Using `default.result`, with
actual data to return + if "data" in result: + if result["data"] is not None: + result = result["data"] + return result + + return wrapper + + +if __name__ == "__main__": + show_pallete() \ No newline at end of file diff --git a/alerts/models.py b/alerts/models.py index b34b9b76..6b33088a 100644 --- a/alerts/models.py +++ b/alerts/models.py @@ -1,6 +1,7 @@ import time import datetime import requests +from logger import logger from contacts import NN_DISCORD_IDS class NotaryMonitor(): @@ -12,8 +13,8 @@ def get_last_mined(self): r = requests.get("https://stats.kmd.io/api/mining/notary_last_mined_table/") return r.json()["results"] except Exception as e: - print(f"Error: {e}") - print(f"Response: {r.content}") + logger.info(f"Error: {e}") + logger.info(f"Response: {r.content}") return {} def alert_slow_miners(self): diff --git a/code/scripts/align_ntx_scores.py b/code/scripts/align_ntx_scores.py index 6263ff3a..997a84bd 100755 --- a/code/scripts/align_ntx_scores.py +++ b/code/scripts/align_ntx_scores.py @@ -5,6 +5,7 @@ from decorators import print_runtime import lib_ntx import lib_update_ntx +from logger import logger ''' At the end of a season (or after an epoch change), the ntx scores may need to be realigned to the new epoch scores. @@ -40,7 +41,7 @@ coin_ranges[server][coin].update({ "end": end }) - print(f"{season} {server} {epoch} {score}") + logger.info(f"{season} {server} {epoch} {score}") for coin in coin_ranges[server]: if "end" not in coin_ranges[server][coin]: @@ -70,6 +71,6 @@ if __name__ == '__main__': - print(coin_ranges) + logger.info(coin_ranges) diff --git a/code/scripts/archive_season_data.py b/code/scripts/archive_season_data.py index 7ef83605..5573179c 100755 --- a/code/scripts/archive_season_data.py +++ b/code/scripts/archive_season_data.py @@ -6,13 +6,14 @@ from lib_update_ntx import delete_from_notarised_tbl_where, update_ntx_row import lib_helper as helper import lib_ntx +from logger import logger def archive_past_seasons(current_seasons=["Season_7"]): for season in SEASONS_INFO: - print(season) + logger.info(season) if season not in current_seasons: - print(season) + logger.info(season) data = select_from_notarised_tbl_where(season=season) for i in data: txid = i[1] diff --git a/code/scripts/build_prices_dict.py b/code/scripts/build_prices_dict.py index 738a9249..3c796f81 100644 --- a/code/scripts/build_prices_dict.py +++ b/code/scripts/build_prices_dict.py @@ -7,12 +7,13 @@ from lib_api import get_kmd_price import datetime from datetime import datetime as dt +from logger import logger try: with open("prices_history.json", "r") as j: prices = json.load(j) except Exception as e: - print(e) + logger.info(e) prices = {} for season in ["Season_7"]: @@ -35,7 +36,7 @@ date = "-".join(date) api_prices = get_kmd_price(date) prices[season][f"{start}"].update(api_prices) - print(api_prices['aud']) + logger.info(api_prices['aud']) time.sleep(5) start += datetime.timedelta(days=1) diff --git a/code/scripts/convert_seednodes.py b/code/scripts/convert_seednodes.py index ae0adb23..5f926808 100644 --- a/code/scripts/convert_seednodes.py +++ b/code/scripts/convert_seednodes.py @@ -1,12 +1,13 @@ import csv import json +from logger import logger seednodes = {"Season_7": {}} with open('s7_seednodes.csv', 'r') as file: reader = csv.reader(file) for row in reader: - print(row) + logger.info(row) seednodes["Season_7"].update({ row[0]: { "IP": row[2], diff --git a/code/scripts/cron_reward_testnet.py b/code/scripts/cron_reward_testnet.py index 6a6617ad..b44893b5 100755 --- 
a/code/scripts/cron_reward_testnet.py +++ b/code/scripts/cron_reward_testnet.py @@ -10,6 +10,7 @@ import lib_urls as urls from notary_candidates import CANDIDATE_ADDRESSES from notary_pubkeys import NOTARY_PUBKEYS +from logger import logger PUBKEY = "03f7e4f3dfff18aa16ef409d5973397dc968f9077a21acef0c4af3b4829c2a9fb5" VOTE_YEAR = "VOTE2022" @@ -29,10 +30,10 @@ proposal_nodes = lib_vote.get_proposal_nodes() if len(ntx) == 0: - print("No ntx detected!") + logger.info("No ntx detected!") sys.exit() - print(f"{len(ntx)} ntx detected!") + logger.info(f"{len(ntx)} ntx detected!") with open('/home/smk762/kmd_ntx_stats_docker/code/scripts/rewarded_ntx.json', 'r') as f: rewarded_ntx = json.load(f) @@ -62,7 +63,7 @@ utxo_ref = f'{utxo["tx_hash"]}_{utxo["tx_pos"]}' if utxo_ref in used_utxos: - print(f"{utxo_ref} already used!") + logger.info(f"{utxo_ref} already used!") else: amount = utxo["value"] / 100000000 if amount > 5: @@ -83,7 +84,7 @@ }) else: - print(f"{notary} not in proposal nodes") + logger.info(f"{notary} not in proposal nodes") remaining_input_value = round(input_value - (reward_amount * len(vouts)) - 0.0001, 5) vouts.update({ @@ -93,28 +94,28 @@ try: rawhex = RPC[VOTE_YEAR].createrawtransaction(input_utxo, vouts) - print(f"rawhex: {rawhex}") + logger.info(f"rawhex: {rawhex}") time.sleep(0.1) signedhex = RPC[VOTE_YEAR].signrawtransaction(rawhex) - print(f"signedhex: {signedhex}") + logger.info(f"signedhex: {signedhex}") time.sleep(0.1) txid = RPC[VOTE_YEAR].sendrawtransaction(signedhex["hex"]) - print(f"Sent {reward_amount} each to {notaries} for {coin}:{ac_height}") - print(f"txid: {txid}") + logger.info(f"Sent {reward_amount} each to {notaries} for {coin}:{ac_height}") + logger.info(f"txid: {txid}") time.sleep(0.1) except Exception as e: - print(e) - print(utxo) - print(vouts) + logger.info(e) + logger.info(utxo) + logger.info(vouts) rewarded_ntx[coin].append(ac_height) break else: - print("utxo too small") + logger.info("utxo too small") else: - print("ntx already rewarded") + logger.info("ntx already rewarded") json.dump(used_utxos, open('used_utxos.json', 'w+')) json.dump(rewarded_ntx, open('rewarded_ntx.json', 'w+')) diff --git a/code/scripts/cron_update_addresses_table.py b/code/scripts/cron_update_addresses_table.py index d0d5646f..18c6c5ab 100755 --- a/code/scripts/cron_update_addresses_table.py +++ b/code/scripts/cron_update_addresses_table.py @@ -4,6 +4,7 @@ from decorators import * from lib_const import * from lib_wallet import populate_addresses +from logger import logger ''' You should only need to run this once per season, unless notary pubkeys change @@ -17,7 +18,7 @@ def update_adresses(seasons): CONN.commit() for season in seasons: - print(season) + logger.info(season) for server in SEASONS_INFO[season]['servers']: populate_addresses(season, server) diff --git a/code/scripts/cron_update_mined_tables.py b/code/scripts/cron_update_mined_tables.py index aa518b2a..d4d002f8 100755 --- a/code/scripts/cron_update_mined_tables.py +++ b/code/scripts/cron_update_mined_tables.py @@ -3,6 +3,7 @@ from lib_helper import has_season_started from lib_mining import update_mined_table, update_mined_count_daily_table, update_mined_count_season_table from decorators import print_runtime +from logger import logger ''' Script for updating mining related databases @@ -21,7 +22,7 @@ def run_updates(seasons): update_mined_count_daily_table("since_genesis", True, True) else: for season in seasons: - print(f"Getting mined blocks for {season}") + logger.info(f"Getting mined blocks for {season}") if 
RESCAN_SEASON: update_mined_table(season, "KMD", SEASONS_INFO[season]["start_block"]) update_mined_count_daily_table(season, True) diff --git a/code/scripts/cron_update_social_notaries.py b/code/scripts/cron_update_social_notaries.py index 962e6ec6..7b0cb834 100755 --- a/code/scripts/cron_update_social_notaries.py +++ b/code/scripts/cron_update_social_notaries.py @@ -8,6 +8,7 @@ from lib_helper import get_season_notaries, get_nn_region_split from lib_urls import get_notary_nodes_repo_elected_nn_social_url, get_notary_addresses_url from models import nn_social_row +from logger import logger @print_runtime @@ -181,7 +182,7 @@ def remove_invalid_notaries(season): if __name__ == "__main__": - print(EXCLUDED_SEASONS) + logger.info(EXCLUDED_SEASONS) season = "Season_7" logger.info(f"Preparing to populate {season} [social_notaries] table...") populate_social_notaries(season) diff --git a/code/scripts/generate_raw_transaction.py b/code/scripts/generate_raw_transaction.py index 1d88bd44..401fc0ee 100644 --- a/code/scripts/generate_raw_transaction.py +++ b/code/scripts/generate_raw_transaction.py @@ -4,6 +4,7 @@ import json import platform from slickrpc import Proxy +from logger import logger # define data dir def def_data_dir(): @@ -37,13 +38,13 @@ def def_credentials(coin): if coin == 'KMD': rpcport = 7771 else: - print("rpcport not in conf file, exiting") - print("check " + coin_config_file) + logger.info("rpcport not in conf file, exiting") + logger.info("check " + coin_config_file) exit(1) try: return (Proxy("http://%s:%s@127.0.0.1:%d" % (rpcuser, rpcpassword, int(rpcport)), timeout=90)) except: - print("Unable to set RPC proxy, please confirm rpcuser, rpcpassword and rpcport are set in "+coin_config_file) + logger.info("Unable to set RPC proxy, please confirm rpcuser, rpcpassword and rpcport are set in "+coin_config_file) kmd_rpc = def_credentials("KMD") @@ -59,4 +60,4 @@ def def_credentials(coin): input_txids.append({"txid":txid["txid"], "vout":txid["vout"]}) -print("komodo-cli createrawtransaction '"+json.dumps(input_txids).replace('"', '\"')+"' '{\""+output_addr+"\":"+str(output_value)+"}'") +logger.info("komodo-cli createrawtransaction '"+json.dumps(input_txids).replace('"', '\"')+"' '{\""+output_addr+"\":"+str(output_value)+"}'") diff --git a/code/scripts/generate_sendmany.py b/code/scripts/generate_sendmany.py index 550657fb..c1aabbc6 100755 --- a/code/scripts/generate_sendmany.py +++ b/code/scripts/generate_sendmany.py @@ -3,6 +3,7 @@ import sys import json import requests +from logger import logger coin = input("Coin: ") season = input("Season: ") @@ -22,5 +23,5 @@ for address in addresses: sendmany.update({address:amount}) -print(coin+' sendmany "" "'+json.dumps(sendmany).replace('"', '\\"')+'"') -print(f"Sendmany generated for {len(resp)} {season} {server} addresses to send {amount}") +logger.info(coin+' sendmany "" "'+json.dumps(sendmany).replace('"', '\\"')+'"') +logger.info(f"Sendmany generated for {len(resp)} {season} {server} addresses to send {amount}") diff --git a/code/scripts/get_mined_by_year.py b/code/scripts/get_mined_by_year.py index e5ae4b6d..ddd4376c 100644 --- a/code/scripts/get_mined_by_year.py +++ b/code/scripts/get_mined_by_year.py @@ -1,4 +1,5 @@ import lib_query +from logger import logger rewards = {} for year in range(2016, 2024): @@ -18,7 +19,7 @@ rewards.update({"since_genesis": resp}) for i in rewards: - print(f"Rewards {i}: {rewards[i]['claims']} claims, {rewards[i]['value']} KMD value") + logger.info(f"Rewards {i}: {rewards[i]['claims']} claims, 
{rewards[i]['value']} KMD value") mined = {} @@ -39,4 +40,4 @@ mined.update({"since_genesis": resp}) for i in mined: - print(f"Mined {i}: {mined[i]['blocks']} blocks, {mined[i]['value']} KMD value") \ No newline at end of file + logger.info(f"Mined {i}: {mined[i]['blocks']} blocks, {mined[i]['value']} KMD value") \ No newline at end of file diff --git a/code/scripts/get_mining_stats.py b/code/scripts/get_mining_stats.py index fe596879..7daf71f6 100644 --- a/code/scripts/get_mining_stats.py +++ b/code/scripts/get_mining_stats.py @@ -5,6 +5,7 @@ import calendar import lib_rpc from datetime import datetime +from logger import logger if __name__ == '__main__': @@ -13,13 +14,13 @@ for year in range(2018, 2024): mining_stats.update({year:{}}) for month in range(1, 13): - print(year) + logger.info(year) month_str = dt.datetime(year, month, 1, 0, 0).strftime("%B") ldom = calendar.monthrange(year, month)[1] min_blocktime = int(dt.datetime(year, month, 1, 0, 0).timestamp()) max_blocktime = int(dt.datetime(year, month, ldom, 23, 59, 59).timestamp()) url = f"http://116.203.120.91:8762/info/mined_between_blocktimes/?min_blocktime={min_blocktime}&max_blocktime={max_blocktime}" - print(f"url: {url}") + logger.info(f"url: {url}") data = requests.get(url).json()["results"] if data["blocks_mined"]: baseline_mined_value = data["blocks_mined"] * 3 @@ -47,6 +48,6 @@ surplus_to_mining = r["total"] - last_supply - data["sum_mined"] data.update({"claimed_rewards_for_month": surplus_to_mining}) last_supply = r["total"] - print(resp) + logger.info(resp) with open(f"mining_stats_by_month.json", "w+") as j: json.dump(mining_stats, j, indent=4) diff --git a/code/scripts/lib_api.py b/code/scripts/lib_api.py index ed732b37..127829f4 100644 --- a/code/scripts/lib_api.py +++ b/code/scripts/lib_api.py @@ -6,6 +6,7 @@ from lib_const import * from lib_github import * from lib_urls import get_dpow_active_coins_url, get_kmd_price_url +from logger import logger def api_sleep_or_exit(resp, exit=None): @@ -58,7 +59,7 @@ def get_btc_address_txids(address, before=None): logger.info(f"getting BTC TXIDs for {address} before block {before}") try: url = 'https://api.blockcypher.com/v1/btc/main/addrs/'+address+'?limit=2000' - print(url) + logger.info(url) if before: url = url+'&before='+str(before) headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'} @@ -85,7 +86,7 @@ def get_btc_tx_info(tx_hash, wait=True, exit_script=False): return r.json() except Exception as e: logger.warning(e) - print("err in get_btc_tx_info") + logger.info("err in get_btc_tx_info") return {"err":str(e)} if exit_script: logger.warning("Exiting script to avoid API rate limits...") @@ -160,7 +161,7 @@ def get_ltc_tx_info(tx_hash, wait=True, exit_script=False): return r.json() except Exception as e: logger.warning(e) - print("err in get_ltc_tx_info") + logger.info("err in get_ltc_tx_info") return {"err":str(e)} if exit_script: logger.warning("Exiting script to avoid API rate limits...") diff --git a/code/scripts/lib_coins.py b/code/scripts/lib_coins.py index 2587bb86..7c523098 100644 --- a/code/scripts/lib_coins.py +++ b/code/scripts/lib_coins.py @@ -13,6 +13,7 @@ from lib_crypto import SMARTCHAIN_BASE_58 from lib_helper import get_season_coins from decorators import print_runtime +from logger import logger ''' This script scans the komodo, coins and dpow repositories and updates contexual info about the coins in the "coins" table. 
@@ -125,14 +126,14 @@ def get_coins_repo_electrums(electrums, coins_data): data = requests.get(electrums[coin]).json() logger.info(f"[get_coins_repo_electrums] {coin}") for item in data: - print(item) + logger.info(item) if "protocol" in item: if item['protocol'] == "SSL": coins_data[coin]['electrums_ssl'].append(item['url']) if 'ws_url' in item: coins_data[coin]['electrums_wss'].append(item['ws_url']) elif item['protocol'] == "WSS": - print(item) + logger.info(item) if 'ws_url' in item: coins_data[coin]['electrums_wss'].append(item['ws_url']) elif 'url' in item: @@ -281,7 +282,7 @@ def remove_delisted_coins(dpow_coins): delisted_coins = list(set(db_coins) - set(coins_repo_coins)) for coin in delisted_coins: if coin not in dpow_coins: - print(f"delisting {coin}") + logger.info(f"delisting {coin}") delist_coin(coin) @@ -297,7 +298,7 @@ def parse_electrum_explorer(coins_data): explorers = {} for item in explorers_data: if 'message' in explorers_data: - print(explorers_data) + logger.info(explorers_data) if item["name"] not in ["deprecated", "explorer_paths.json"]: explorers.update({item["name"]:item["download_url"]}) diff --git a/code/scripts/lib_color.py b/code/scripts/lib_color.py index f3e3411b..a38edcd9 100644 --- a/code/scripts/lib_color.py +++ b/code/scripts/lib_color.py @@ -1,5 +1,8 @@ #!/usr/bin/env python3.12 +from logger import logger + +# TODO: sunset this def colorize(string, color): colors = { 'black':'\033[30m', @@ -30,23 +33,23 @@ def colorize(string, color): def color_input(msg): return input(colorize(msg, "orange")) def table_print(msg): - print(colorize(msg, "cyan")) + logger.info(colorize(msg, "cyan")) def info_print(msg): - print(colorize(msg, "orange")) + logger.info(colorize(msg, "orange")) def status_print(msg): - print(colorize(msg, "status")) + logger.info(colorize(msg, "status")) def success_print(msg): - print(colorize(msg, "green")) + logger.info(colorize(msg, "green")) def error_print(msg): - print(colorize(msg, "error")) + logger.info(colorize(msg, "error")) def fade_print(msg): - print(colorize(msg, "darkgrey")) + logger.info(colorize(msg, "darkgrey")) def wait_continue(): color_input("Press [Enter] to continue...") diff --git a/code/scripts/lib_const.py b/code/scripts/lib_const.py index fd4af824..ca54a31e 100644 --- a/code/scripts/lib_const.py +++ b/code/scripts/lib_const.py @@ -95,4 +95,4 @@ COINS_CONFIG_URL = "https://raw.githubusercontent.com/KomodoPlatform/coins/master/utils/coins_config.json" COINS_CONFIG_PATH = f"{SCRIPT_PATH}/coins_config.json" -print(f"{int(time.time()) - NOW} sec to complete const") \ No newline at end of file +logger.info(f"{int(time.time()) - NOW} sec to complete const") \ No newline at end of file diff --git a/code/scripts/lib_crypto.py b/code/scripts/lib_crypto.py index 1c52a5fa..e06c6070 100644 --- a/code/scripts/lib_crypto.py +++ b/code/scripts/lib_crypto.py @@ -9,6 +9,7 @@ from bitcoin.wallet import P2PKHBitcoinAddress import alerts import lib_urls +from logger import logger # For more params, check a project's /src/chainparams.cpp file @@ -126,7 +127,7 @@ class GLEEC_3P_CoinParams(CoreMainParams): if coin in COIN_PARAMS: COIN_PARAMS.update({coin: COIN_PARAMS[coin]}) else: - print(alerts.send_telegram(f"{__name__}: {coin} doesnt have params defined!")) + logger.info(alerts.send_telegram(f"{__name__}: {coin} doesnt have params defined!"))
SMARTCHAIN_BASE_58 = { diff --git a/code/scripts/lib_dpow_const.py b/code/scripts/lib_dpow_const.py index f9b0dc47..67fb357e 100644 --- a/code/scripts/lib_dpow_const.py +++ b/code/scripts/lib_dpow_const.py @@ -8,6 +8,7 @@ from lib_crypto import * from notary_pubkeys import NOTARY_PUBKEYS from notary_candidates import CANDIDATE_ADDRESSES +from logger import logger def get_scoring_epochs_repo_data(branch='master'): @@ -697,7 +698,7 @@ def get_season_server_coins(season, server): SEASON = _season -print(f"{int(time.time()) - NOW} sec to complete dpow const") +logger.info(f"{int(time.time()) - NOW} sec to complete dpow const") NEXT_SEASON_COINS = [] diff --git a/code/scripts/lib_electrum.py b/code/scripts/lib_electrum.py index 56ead7eb..e4c11c52 100644 --- a/code/scripts/lib_electrum.py +++ b/code/scripts/lib_electrum.py @@ -5,6 +5,7 @@ from lib_helper import get_electrum_url_port from lib_const import ELECTRUMS, ELECTRUMS_SSL, ELECTRUMS_WSS, logger from lib_crypto import * +from logger import logger socket.setdefaulttimeout(5) @@ -45,7 +46,7 @@ def get_full_electrum_balance(pubkey, coin): total = total_confirmed + total_unconfirmed return total/100000000 except Exception as e: - print(f"Error in [get_full_electrum_balance] with ELECTRUMS_SSL for {coin}: {e}") + logger.info(f"Error in [get_full_electrum_balance] with ELECTRUMS_SSL for {coin}: {e}") if coin in ELECTRUMS: for electrum in ELECTRUMS[coin]: @@ -64,7 +65,7 @@ def get_full_electrum_balance(pubkey, coin): total = total_confirmed + total_unconfirmed return total/100000000 except Exception as e: - print(f"Error in [get_full_electrum_balance] with ELECTRUMS for {coin}: {e}") + logger.info(f"Error in [get_full_electrum_balance] with ELECTRUMS for {coin}: {e}") return -1 @@ -81,7 +82,7 @@ def get_notary_utxo_count(coin, pubkey): num_unspent +=1 return num_unspent else: - print(f"ELECTRUM returning 'int' response for {coin}") + logger.info(f"ELECTRUM returning 'int' response for {coin}") elif coin in ELECTRUMS: @@ -96,7 +97,7 @@ def get_notary_utxo_count(coin, pubkey): num_unspent +=1 return num_unspent else: - print(f"ELECTRUM returning 'int' response for {coin}") + logger.info(f"ELECTRUM returning 'int' response for {coin}") else: logger.info(f"{coin} not in electrums or electrums_ssl") diff --git a/code/scripts/lib_epochs.py b/code/scripts/lib_epochs.py index 40d8685a..bfc6aeca 100644 --- a/code/scripts/lib_epochs.py +++ b/code/scripts/lib_epochs.py @@ -6,6 +6,7 @@ from models import ntx_tenure_row, scoring_epoch_row from decorators import print_runtime from lib_helper import get_season_coins +from logger import logger @print_runtime @@ -89,7 +90,7 @@ def update_tenure(season): if now < SEASONS_INFO[season]["start_time"] and s_start - now < 604800: - print("Pre-season epoch pop!") + logger.info("Pre-season epoch pop!") if server in NEXT_SEASON_COINS: season_server_coins = NEXT_SEASON_COINS[server] else: @@ -118,7 +119,7 @@ def update_epochs(season): if now < SEASONS_INFO[season]["start_time"] and s_start - now < 604800: - print("Pre-season epoch pop!") + logger.info("Pre-season epoch pop!") if server in NEXT_SEASON_COINS: active_coins = NEXT_SEASON_COINS[server] num_coins = len(active_coins) diff --git a/code/scripts/lib_github.py b/code/scripts/lib_github.py index 2c972d5d..f930be94 100644 --- a/code/scripts/lib_github.py +++ b/code/scripts/lib_github.py @@ -4,6 +4,7 @@ import requests from dotenv import load_dotenv from lib_color import * +from logger import logger load_dotenv() @@ -37,7 +38,7 @@ def check_release_exists(org, repo, 
release_name): r = gh.get(f"{base_url}/repos/{org}/{repo}/releases").json() for release in r: if release["name"] == release_name: - status_print(f"A release with the name '{release_name}' already exists at {release['html_url']}!") + logger.info(f"A release with the name '{release_name}' already exists at {release['html_url']}!") return True return False @@ -47,11 +48,11 @@ def create_release(owner, repo, data): url = f"{base_url}/repos/{owner}/{repo}/releases" r = gh.post(url, data=data) if 'html_url' in r.json(): - success_print(f"Draft release created at {r.json()['html_url']}") + logger.info(f"Draft release created at {r.json()['html_url']}") else: - error_print("Error creating release!") - error_print(data) - error_print(r.json()) + logger.info("Error creating release!") + logger.info(data) + logger.info(r.json()) def get_run_branch(run_url): diff --git a/code/scripts/lib_mining.py b/code/scripts/lib_mining.py index 5518c732..12cd90cf 100644 --- a/code/scripts/lib_mining.py +++ b/code/scripts/lib_mining.py @@ -14,6 +14,7 @@ import lib_api as api import lib_validate import lib_helper +from logger import logger script_path = os.path.abspath(os.path.dirname(sys.argv[0])) @@ -46,9 +47,9 @@ def update_mined_rows(rescan_blocks, coin="KMD", prices=None): date = date.split("-") date.reverse() date = "-".join(date) - print(date) - print(season) - #print(prices) + logger.info(date) + logger.info(season) + #logger.info(prices) if season not in prices: prices.update({season: {}}) @@ -62,13 +63,13 @@ def update_mined_rows(rescan_blocks, coin="KMD", prices=None): price_updated = True if "btc" in api_prices: prices[season][f"{date}"].update(api_prices) - print(prices[season][f"{date}"]) - #print(prices[season][f"{date}"]) + logger.info(prices[season][f"{date}"]) + #logger.info(prices[season][f"{date}"]) time.sleep(1) else: prices[season][f"{date}"].update({"btc":0,"usd":0}) - #print(prices[season][date]) + #logger.info(prices[season][date]) if 'usd' in prices[season][date]: row.usd_price = Decimal(prices[season][date]['usd']) if 'btc' in prices[season][date]: @@ -102,7 +103,7 @@ def update_mined_table(season, coin="KMD", start_block=None): with open(f"{script_path}/prices_history.json", "r") as j: prices = json.load(j) except Exception as e: - print(e) + logger.info(e) prices = {} update_mined_rows(rescan_blocks, "KMD", prices) @@ -114,7 +115,7 @@ def update_mined_count_daily_table(season, rescan=None, since_genesis=False): with open(f"{script_path}/prices_history.json", "r") as j: prices = json.load(j) except Exception as e: - print(e) + logger.info(e) prices = {} if season != "since_genesis": @@ -132,7 +133,7 @@ def update_mined_count_daily_table(season, rescan=None, since_genesis=False): start = end - datetime.timedelta(days=30) logger.info(f"[process_mined_aggregates] Aggregating daily mined counts from {start} to {end}") - print(f"[process_mined_aggregates] Aggregating daily mined counts from {start} to {end}") + logger.info(f"[process_mined_aggregates] Aggregating daily mined counts from {start} to {end}") while start <= end: date = f"{start}".split("-") diff --git a/code/scripts/lib_ntx.py b/code/scripts/lib_ntx.py index 8095175f..e84c5ddc 100644 --- a/code/scripts/lib_ntx.py +++ b/code/scripts/lib_ntx.py @@ -14,6 +14,7 @@ from decorators import print_runtime from lib_const import * from models import * +from logger import logger # Notarised table @@ -574,7 +575,7 @@ def add_scores_counts(self): coin = item[2] server_epoch_coin_count = item[3]
server_epoch_coin_score = item[4] - print(item) + logger.info(item) if len({server, epoch}.intersection({"Unofficial", "LTC", "BTC", "None"})) == 0: diff --git a/code/scripts/lib_query.py b/code/scripts/lib_query.py index 54cf7eef..8fed7d86 100644 --- a/code/scripts/lib_query.py +++ b/code/scripts/lib_query.py @@ -4,6 +4,7 @@ from lib_query_ntx import * from lib_filter import * from decorators import print_runtime +from logger import logger def get_mined_date_aggregates(day): @@ -120,8 +121,8 @@ def get_max_value_mined_txid(max_value, season=None): if len(results) > 0: return results[0] except Exception as e: - #print(sql) - print(e) + #logger.info(sql) + logger.info(e) return '' diff --git a/code/scripts/lib_query_ntx.py b/code/scripts/lib_query_ntx.py index 94e257b0..57b8e2cf 100644 --- a/code/scripts/lib_query_ntx.py +++ b/code/scripts/lib_query_ntx.py @@ -4,6 +4,7 @@ from lib_db import connect_db import lib_helper as helper from decorators import print_runtime +from logger import logger def select_from_notarised_tbl_where( @@ -230,8 +231,8 @@ def get_notarised_coin_date_aggregates(season, day): results = CURSOR.fetchall() return results except Exception as e: - print(e) - print(sql) + logger.info(e) + logger.info(sql) logger.warning(f"No get_notarised_coin_date_aggregates results for {day} {season}") return () @@ -328,17 +329,17 @@ def get_ntx_scored(season, server, coin, lowest_blocktime, highest_blocktime): CURSOR.execute(sql) unofficial_resp = CURSOR.fetchall() - print(f"------------------------------------") - print(f"Coin: {coin}") - print(f"Season: {season}") - print(f"Server: {server}") - print(f"lowest_blocktime: {lowest_blocktime}") - print(f"highest_blocktime: {highest_blocktime}") - print(f"in_season_server_resp: {len(in_season_server_resp)}") - print(f"scored_resp: {len(scored_resp)}") - print(f"unscored_resp: {len(unscored_resp)}") - print(f"unofficial_resp: {len(unofficial_resp)}") - print(f"------------------------------------") + logger.info(f"------------------------------------") + logger.info(f"Coin: {coin}") + logger.info(f"Season: {season}") + logger.info(f"Server: {server}") + logger.info(f"lowest_blocktime: {lowest_blocktime}") + logger.info(f"highest_blocktime: {highest_blocktime}") + logger.info(f"in_season_server_resp: {len(in_season_server_resp)}") + logger.info(f"scored_resp: {len(scored_resp)}") + logger.info(f"unscored_resp: {len(unscored_resp)}") + logger.info(f"unofficial_resp: {len(unofficial_resp)}") + logger.info(f"------------------------------------") for item in scored_resp: scored_list.append(scored_resp[0]) diff --git a/code/scripts/lib_rpc.py b/code/scripts/lib_rpc.py index e6def37b..9b24f91c 100644 --- a/code/scripts/lib_rpc.py +++ b/code/scripts/lib_rpc.py @@ -5,6 +5,7 @@ from slickrpc import Proxy import lib_const import lib_crypto +from logger import logger # define data dir def def_data_dir(): @@ -38,13 +39,13 @@ def def_credentials(coin): if coin == 'KMD': rpcport = 7771 else: - print("rpcport not in conf file, exiting") - print("check " + coin_config_file) + logger.info("rpcport not in conf file, exiting") + logger.info("check " + coin_config_file) exit(1) try: return (Proxy("http://%s:%s@127.0.0.1:%d" % (rpcuser, rpcpassword, int(rpcport)), timeout=300)) except: - print("Unable to set RPC proxy, please confirm rpcuser, rpcpassword and rpcport are set in "+coin_config_file) + logger.info("Unable to set RPC proxy, please confirm rpcuser, rpcpassword and rpcport are set in "+coin_config_file) def get_ntx_txids(start, end): return 
RPC["KMD"].getaddresstxids({"addresses": [lib_const.NTX_ADDR], "start":start, "end":end}) @@ -56,5 +57,5 @@ def get_ntx_txids(start, end): try: RPC[coin] = def_credentials(coin) except: - #print(f"{coin} RPC failed") + #logger.info(f"{coin} RPC failed") pass \ No newline at end of file diff --git a/code/scripts/lib_tests.py b/code/scripts/lib_tests.py index ec85f598..83094106 100644 --- a/code/scripts/lib_tests.py +++ b/code/scripts/lib_tests.py @@ -19,6 +19,7 @@ import lib_urls import lib_validate import lib_wallet +from logger import logger # TODO: For CI, these tests need to be separated. @@ -590,7 +591,7 @@ def test_get_season_ntx_dict(season): sum_count = 0 sum_score = 0 for server in season_ntx_dict["notaries"]["alien_AR"]["servers"]: - print(season_ntx_dict["notaries"]["alien_AR"]["servers"][server]) + logger.info(season_ntx_dict["notaries"]["alien_AR"]["servers"][server]) sum_count += season_ntx_dict["notaries"]["alien_AR"]["servers"][server]["notary_server_ntx_count"] sum_score += season_ntx_dict["notaries"]["alien_AR"]["servers"][server]["notary_server_ntx_score"] assert round(sum_count, 4) == round(season_ntx_dict["notaries"]["alien_AR"]["notary_ntx_count"], 4) diff --git a/code/scripts/lib_update.py b/code/scripts/lib_update.py index 3e4d7d0f..bd80d366 100644 --- a/code/scripts/lib_update.py +++ b/code/scripts/lib_update.py @@ -5,6 +5,7 @@ from datetime import datetime as dt from lib_const import * from lib_update_ntx import * +from logger import logger #### KMD SUPPLY TABLE @@ -123,7 +124,7 @@ def delist_coin(coin): sql = f"DELETE FROM coins WHERE coin = '{coin}';" CURSOR.execute(sql) CONN.commit() - #print(sql) + #logger.info(sql) return 1 except Exception as e: logger.debug(e) @@ -318,10 +319,10 @@ def update_coin_social_row(row_data): telegram='{row_data[9]}', twitter='{row_data[10]}', \ youtube='{row_data[11]}', website='{row_data[12]}', \ season='{row_data[13]}';" - #print(sql) + #logger.info(sql) CURSOR.execute(sql, row_data) CONN.commit() - print("commited") + logger.info("commited") return 1 except Exception as e: if str(e).find('Duplicate') == -1: @@ -562,7 +563,7 @@ def update_notary_vote_row(row_data): try: CURSOR.execute(sql, row_data) CONN.commit() - print("Notary Vote Row updated") + logger.info("Notary Vote Row updated") except Exception as e: logger.debug(e) if str(e).find('duplicate') == -1: diff --git a/code/scripts/lib_update_ntx.py b/code/scripts/lib_update_ntx.py index 20daa70c..9ada3c27 100644 --- a/code/scripts/lib_update_ntx.py +++ b/code/scripts/lib_update_ntx.py @@ -4,6 +4,7 @@ from psycopg2.extras import execute_values from lib_const import * from lib_filter import get_notarised_conditions_filter +from logger import logger def update_ntx_row(row_data, table='notarised', unique='unique_txid'): @@ -39,7 +40,7 @@ def update_server_notarised_tbl(old_server, server): try: CURSOR.execute(sql) CONN.commit() - print(f"{old_server} reclassed as {server}") + logger.info(f"{old_server} reclassed as {server}") except Exception as e: logger.debug(e) CONN.rollback() @@ -68,7 +69,7 @@ def update_unofficial_coin_notarised_tbl(season, coin): try: CURSOR.execute(sql) CONN.commit() - print(f"Unofficial coin {coin} updated for {season}") + logger.info(f"Unofficial coin {coin} updated for {season}") except Exception as e: logger.debug(e) CONN.rollback() @@ -112,7 +113,7 @@ def update_txid_score_notarised_tbl(txid, scored, score_value): try: CURSOR.execute(sql) CONN.commit() - print(f"{txid} tagged as {scored} ({score_value})") + logger.info(f"{txid} tagged as {scored} 
({score_value})") except Exception as e: logger.debug(e) CONN.rollback() @@ -133,7 +134,7 @@ def update_season_server_addresses_notarised_tbl(txid, season, server, addresses try: CURSOR.execute(sql) CONN.commit() - print(f"{txid} tagged as {season}") + logger.info(f"{txid} tagged as {season}") except Exception as e: logger.debug(e) CONN.rollback() diff --git a/code/scripts/lib_urls.py b/code/scripts/lib_urls.py index aa09cd6e..7b6adac8 100644 --- a/code/scripts/lib_urls.py +++ b/code/scripts/lib_urls.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3.12 import os from dotenv import load_dotenv +from logger import logger # ENV VARS load_dotenv() @@ -38,7 +39,7 @@ def get_electrums_wss_info_url(local=True): def get_ntxid_list_url(season, server, coin, local=True): api_server = get_api_server(local) url = f"{api_server}/api/info/notarisation_txid_list/?season={season}&server={server}&coin={coin}" - print(url) + logger.info(url) return url diff --git a/code/scripts/lib_vote.py b/code/scripts/lib_vote.py index b9d54867..340b9454 100644 --- a/code/scripts/lib_vote.py +++ b/code/scripts/lib_vote.py @@ -7,6 +7,7 @@ import lib_query_ntx as query import lib_github as git from notary_candidates import CANDIDATE_ADDRESSES +from logger import logger # Notarised table class notary_vote(): @@ -62,9 +63,9 @@ def get_vote_row(self, block_height, raw_tx, txid, vin_addresses, vout, vouts): row.year = self.year return row else: - print(f"{txid} not a vote tx") + logger.info(f"{txid} not a vote tx") else: - print(f"{txid} looks like a self-send") + logger.info(f"{txid} looks like a self-send") elif "addresses" in vout["scriptPubKey"]: if len(vout["scriptPubKey"]["addresses"]) == 1: @@ -97,9 +98,9 @@ def get_vote_row(self, block_height, raw_tx, txid, vin_addresses, vout, vouts): row.year = self.year return row else: - print(f"{txid} not a vote tx") + logger.info(f"{txid} not a vote tx") else: - print(f"{txid} looks like a self-send") + logger.info(f"{txid} looks like a self-send") else: for address in len(vout["scriptPubKey"]["addresses"]): @@ -132,9 +133,9 @@ def get_vote_row(self, block_height, raw_tx, txid, vin_addresses, vout, vouts): row.year = self.year return row else: - print(f"{txid} not a vote tx") + logger.info(f"{txid} not a vote tx") else: - print(f"{txid} looks like a self-send") + logger.info(f"{txid} looks like a self-send") return None diff --git a/code/scripts/lib_wallet.py b/code/scripts/lib_wallet.py index cb965101..58317795 100644 --- a/code/scripts/lib_wallet.py +++ b/code/scripts/lib_wallet.py @@ -18,6 +18,7 @@ from lib_helper import get_pubkeys from models import addresses_row, rewards_tx_row, kmd_supply_row from lib_threads import update_notary_balances_thread +from logger import logger script_path = os.path.abspath(os.path.dirname(sys.argv[0])) @@ -125,7 +126,7 @@ def update_supply(coin="KMD"): scan_blocks = list(set([*range(1, TIP, 1)]) - set(existing_blocks)) scan_blocks.sort() for block in scan_blocks: - print(block) + logger.info(block) update_supply_for_block(coin, block) diff --git a/code/scripts/scan_mining.py b/code/scripts/scan_mining.py index a2bd184e..d0bd252b 100644 --- a/code/scripts/scan_mining.py +++ b/code/scripts/scan_mining.py @@ -2,12 +2,13 @@ import json import requests +from logger import logger all_data = [] next_url = "http://116.203.120.91:8762/api/source/mined/" while next_url: - print(next_url) + logger.info(next_url) data = requests.get(next_url).json() next_url = data["next"] diff --git a/code/scripts/scan_vote_snapshot.py b/code/scripts/scan_vote_snapshot.py index 
549e462d..06484981 100644 --- a/code/scripts/scan_vote_snapshot.py +++ b/code/scripts/scan_vote_snapshot.py @@ -2,6 +2,7 @@ import json from lib_helper import get_nn_region_split from notary_candidates import CANDIDATE_ADDRESSES +from logger import logger with open('VOTE2022_22920.json', 'r') as j: @@ -35,11 +36,11 @@ for region in regions: - print(region) + logger.info(region) region_scores[region].sort() - print(region_scores[region]) + logger.info(region_scores[region]) region_scores[region].reverse() - print(region_scores[region]) + logger.info(region_scores[region]) for region in regions: for nn in regions[region]: diff --git a/code/scripts/validate_tables.py b/code/scripts/validate_tables.py index 64a92a1f..23faade9 100755 --- a/code/scripts/validate_tables.py +++ b/code/scripts/validate_tables.py @@ -4,6 +4,7 @@ from lib_helper import * from lib_validate import * from lib_query import * +from logger import logger tables = ["addresses", "balances", "coin_sync", "coins", "coin_social", "funding_transactions", "notary_last_ntx", "mined", "mined_count_daily", @@ -17,7 +18,7 @@ for server in SEASONS_INFO[season]["servers"]: for epoch in SEASONS_INFO[season]["servers"][server]["epochs"]: epoch_data = SEASONS_INFO[season]["servers"][server]["epochs"][epoch] - print(epoch_data) + logger.info(epoch_data) score_per_ntx = epoch_data["score_per_ntx"] epoch_start = epoch_data["start_time"] epoch_end = epoch_data["end_time"] @@ -25,15 +26,15 @@ epoch_coins.sort() notarised_coins = get_notarised_coins(season, server, epoch) - print(f"{len(notarised_coins)} coins for {season} {server} {epoch}") + logger.info(f"{len(notarised_coins)} coins for {season} {server} {epoch}") notarised_coins.sort() for coin in notarised_coins: if coin not in epoch_coins: logger.warning(f"Invalid coin {coin} in notarised for {season} {server} {epoch}") epoch_scores = get_notarised_server_epoch_scores(season, server, epoch)[server][epoch] - print(epoch_scores) - print(f"{len(epoch_scores)} ntx score for {season} {server} {epoch}") + logger.info(epoch_scores) + logger.info(f"{len(epoch_scores)} ntx score for {season} {server} {epoch}") if len(epoch_scores) > 1: logger.warning(f"Invalid epoch scores {epoch_scores} in notarised for {season} {server} {epoch}") elif epoch_scores[0] != score_per_ntx: diff --git a/code/scripts/verify_other_nn_btc_tx.py b/code/scripts/verify_other_nn_btc_tx.py index 91aee4a7..cd80594c 100755 --- a/code/scripts/verify_other_nn_btc_tx.py +++ b/code/scripts/verify_other_nn_btc_tx.py @@ -14,6 +14,7 @@ from dotenv import load_dotenv from logging import Handler, Formatter from lib_const import * +from logger import logger load_dotenv() @@ -55,7 +56,7 @@ def format(self, record): notaries_with_others = {} for notary in notaries: - print(f"Checking {notary}") + logger.info(f"Checking {notary}") params = f"?season={season}&notary={notary}&category=Other" r = requests.get(f"{THIS_SERVER}/api/info/notary_btc_transactions/{params}") results = r.json()["results"] @@ -67,27 +68,27 @@ def format(self, record): "txids":[] } }) - print(f"{notary} has {len(txids)} unrecognised transactions") + logger.info(f"{notary} has {len(txids)} unrecognised transactions") for txid in txids: notaries_with_others[notary]['txids'].append(f"https://www.blockchain.com/btc/tx/{txid}") msg = f"### Uncategorised BTC Transactions ###\n" for notary in notaries_with_others: - print(f"{notary}") + logger.info(f"{notary}") msg += f"### {notary} ###\n" for txid in notaries_with_others[notary]['txids']: - print(txid) + logger.info(txid) msg +=
f"{txid}\n" if len(msg) > 2000: - print(msg) + logger.info(msg) logger.warning(msg) msg = '' if msg == f"### Uncategorised BTC Transactions ###\n": pass elif msg != '': - print(msg) + logger.info(msg) logger.warning(msg) diff --git a/code/scripts/verify_other_nn_ltc_tx.py b/code/scripts/verify_other_nn_ltc_tx.py index fb0c510c..a12f774b 100755 --- a/code/scripts/verify_other_nn_ltc_tx.py +++ b/code/scripts/verify_other_nn_ltc_tx.py @@ -14,6 +14,7 @@ from dotenv import load_dotenv from logging import Handler, Formatter from lib_const import * +from logger import logger load_dotenv() @@ -55,7 +56,7 @@ def format(self, record): notaries_with_others = {} for notary in notaries: - print(f"Checking {notary}") + logger.info(f"Checking {notary}") params = f"?season={season}&notary={notary}&category=Other" r = requests.get(f"{THIS_SERVER}/api/info/notary_ltc_transactions/{params}") results = r.json()["results"] @@ -67,27 +68,27 @@ def format(self, record): "txids":[] } }) - print(f"{notary} has {len(txids)} unrecognised transactions") + logger.info(f"{notary} has {len(txids)} unrecognised transactions") for txid in txids: notaries_with_others[notary]['txids'].append(f"https://www.blockchain.com/ltc/tx/{txid}") msg = f"### Uncategorised LTC Transactions ###\n" for notary in notaries_with_others: - print(f"{notary}") + logger.info(f"{notary}") msg += f"### {notary} ###\n" for txid in notaries_with_others[notary]['txids']: - print(txid) + logger.info(txid) msg += f"{txid}\n" if len(msg) > 2000: - print(msg) + logger.info(msg) logger.warning(msg) msg = '' if msg == f"### Uncategorised LTC Transactions ###\n": pass elif msg != '': - print(msg) + logger.info(msg) logger.warning(msg)
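
A minimal usage sketch of the logging pattern this patch moves the scripts to. It assumes the calling script sits alongside a logger.py module like the new alerts/logger.py (so `from logger import logger` resolves); `fetch_example` is a hypothetical helper used purely for illustration.

# usage_sketch.py - illustrative only; assumes a sibling logger.py such as alerts/logger.py
from logger import logger, timed

logger.info("plain info message")      # replaces a bare print()
logger.query("SELECT 1;")              # custom level registered via addLoggingLevel
logger.warning("something looks off")

@timed
def fetch_example():
    # hypothetical helper: returning a dict lets the decorator pick loglevel/message/data
    return {"loglevel": "calc", "message": "fetched example data", "data": [1, 2, 3]}

data = fetch_example()  # StopWatch logs the runtime at the "calc" level; [1, 2, 3] is returned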