diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index d6fb993..e33aa6a 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -36,23 +36,10 @@ jobs:
       run: |
         python -m pip install --upgrade pip
        pip install -r dnas/requirements.txt
-        sudo apt-get install -y whois
-    - name: Run the Python unit tests
+        sudo apt-get install -y whois shellcheck
+    - name: Run the tox tests
      working-directory: /home/runner/work/dfz_name_and_shame/dfz_name_and_shame/
      env:
        SSH_AUTH_SOCK: /tmp/ssh_agent.sock
      run: |
-        sudo mkdir /media/usb0
-        sudo chmod 777 /media/usb0
-        python3 dnas/tests/test_git.py -vv
-        python3 dnas/tests/test_bogon_asn.py -vv
-        python3 dnas/tests/test_bogon_ip.py -vv
-        python3 dnas/tests/test_mrt_archive.py -vv
-        python3 dnas/tests/test_mrt_archives.py -vv
-        python3 dnas/tests/test_mrt_entry.py -vv
-        python3 dnas/tests/test_mrt_getter.py -vv
-        python3 dnas/tests/test_mrt_parser.py -vv
-        python3 dnas/tests/test_mrt_splitter.py -vv
-        python3 dnas/tests/test_mrt_stats.py -vv
-        python3 dnas/tests/test_whois.py -vv
-
+        tox
diff --git a/.gitignore b/.gitignore
index 7896dd5..73a321c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,6 +23,9 @@ redis/data/*
 # Don't sync pycache
 **/__pycache__/
 
+# Don't sync tox cache
+.tox
+
 # Don't sync virtualenv
 **/venv/
 **/.venv/
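The CI job above now delegates the whole test run to tox, and the new .gitignore entry keeps tox's .tox/ cache out of the repo. The tox.ini itself is not part of this diff; a minimal sketch of what such a config might look like (file contents assumed, not taken from the repo):

# Hypothetical tox.ini -- not shown in this diff; the env name, deps, and
# test command are illustrative only.
[tox]
envlist = py3

[testenv]
deps = -r dnas/requirements.txt
commands = python -m unittest discover -s dnas/tests -v
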
""" if type(asn) != int: - raise TypeError( - f"{asn} is not an int: {type(asn)}" - ) + raise TypeError(f"{asn} is not an int: {type(asn)}") - if asn == 0: # RFC 7607 + if asn == 0: # RFC 7607 return True - elif asn == 23456: # RFC 4893 + elif asn == 23456: # RFC 4893 return True - elif asn in range(64496, 64512): # RFC 5398 + elif asn in range(64496, 64512): # RFC 5398 return True - elif asn in range(65536, 65552): # RFC 5398 + elif asn in range(65536, 65552): # RFC 5398 return True - elif asn in range(64512, 65535): # RFC 6996 + elif asn in range(64512, 65535): # RFC 6996 return True - elif asn in range(4200000000, 4294967296): # RFC 6996 + elif asn in range(4200000000, 4294967296): # RFC 6996 return True - elif asn == 65535: # RFC 6996 + elif asn == 65535: # RFC 6996 return True - elif asn == 4294967295: # RFC 6996 + elif asn == 4294967295: # RFC 6996 return True - elif asn in range(65552, 131072): # IANA reserved + elif asn in range(65552, 131072): # IANA reserved return True else: return False diff --git a/dnas/dnas/bogon_attr.py b/dnas/dnas/bogon_attr.py index cad054a..22ec6d9 100644 --- a/dnas/dnas/bogon_attr.py +++ b/dnas/dnas/bogon_attr.py @@ -1,4 +1,5 @@ -from typing import Dict +import typing + class bogon_attr: """ @@ -7,57 +8,55 @@ class bogon_attr: # https://www.iana.org/assignments/bgp-parameters/bgp-parameters.xhtml known_attrs = { - 1: "ORIGIN", # RFC4271 - 2: "AS_PATH", #RFC4271 - 3: "NEXT_HOP", # RFC4271 - 4: "MULTI_EXIT_DISC", # RFC4271 - 5: "LOCAL_PREF", # RFC4271 - 6: "ATOMIC_AGGREGATE", # RFC4271 - 7: "AGGREGATOR", # RFC4271 - 8: "COMMUNITY", # RFC1997 - 9: "ORIGINATOR_ID", # RFC4456 - 10: "CLUSTER_LIST", # RFC4456 - #11: "DPA", # Deprecated - #12: "ADVERTISER", # Deprecated - #13: "RCID_PATH/CLUSTER_ID", # Deprecated - 14: "MP_REACH_NLRI", # RFC4760 - 15: "MP_UNREACH_NLRI", # RFC4760 - 16: "EXTENDED COMMUNITIES", # RFC4360 - 17: "AS4_PATH", # RFC6793 - 18: "AS4_AGGREGATOR", # RFC6793 - #19: "SAFI Specific Attribute", # Deprecated - #20: "Connector Attribute", # Deprecated - #21: "AS_PATHLIMIT" # Deprecated - 22: "PMSI_TUNNEL", # RFC6514 - 23: "Tunnel Encapsulation Attribute", # RFC5512 - 24: "Traffic Engineering", # RFC5543 - 25: "IPv6 Address Specific Extended Community", # RFC5701 - 26: "AIGP", # RFC7311 - 27: "PE Distinguisher Labels", # RFC6514 - #28: "BGP Entropy Label Capability Attribute", # Deprecated - 29: "BGP-LS Attribute", # RFC7752 - 32: "LARGE_COMMUNITY", # RFC8092 - 33: "BGPsec_Path", # RFC8205 - 34:"BGP Community Container Attribute", # draft-ietf-idr-wide-bgp-communities - 35:"Only to Customer", # draft-ietf-idr-bgp-open-policy - 36:"BGP Domain Path", # draft-ietf-bess-evpn-ipvpn-interworking - 37: "SFP attribute", # RFC9015 - 38: "BFD Discriminator", # RFC9026 - 40: "BGP Prefix-SID", # RFC8669 - 128: "ATTR_SET", # RFC6368 - 255: "Reserved for development", #RFC2042 + 1: "ORIGIN", # RFC4271 + 2: "AS_PATH", # RFC4271 + 3: "NEXT_HOP", # RFC4271 + 4: "MULTI_EXIT_DISC", # RFC4271 + 5: "LOCAL_PREF", # RFC4271 + 6: "ATOMIC_AGGREGATE", # RFC4271 + 7: "AGGREGATOR", # RFC4271 + 8: "COMMUNITY", # RFC1997 + 9: "ORIGINATOR_ID", # RFC4456 + 10: "CLUSTER_LIST", # RFC4456 + # 11: "DPA", # Deprecated + # 12: "ADVERTISER", # Deprecated + # 13: "RCID_PATH/CLUSTER_ID", # Deprecated + 14: "MP_REACH_NLRI", # RFC4760 + 15: "MP_UNREACH_NLRI", # RFC4760 + 16: "EXTENDED COMMUNITIES", # RFC4360 + 17: "AS4_PATH", # RFC6793 + 18: "AS4_AGGREGATOR", # RFC6793 + # 19: "SAFI Specific Attribute", # Deprecated + # 20: "Connector Attribute", # Deprecated + # 21: "AS_PATHLIMIT" # 
diff --git a/dnas/dnas/bogon_attr.py b/dnas/dnas/bogon_attr.py
index cad054a..22ec6d9 100644
--- a/dnas/dnas/bogon_attr.py
+++ b/dnas/dnas/bogon_attr.py
@@ -1,4 +1,5 @@
-from typing import Dict
+import typing
 
+
 class bogon_attr:
     """
@@ -7,57 +8,55 @@ class bogon_attr:
 
     # https://www.iana.org/assignments/bgp-parameters/bgp-parameters.xhtml
     known_attrs = {
-        1: "ORIGIN", # RFC4271
-        2: "AS_PATH", #RFC4271
-        3: "NEXT_HOP", # RFC4271
-        4: "MULTI_EXIT_DISC", # RFC4271
-        5: "LOCAL_PREF", # RFC4271
-        6: "ATOMIC_AGGREGATE", # RFC4271
-        7: "AGGREGATOR", # RFC4271
-        8: "COMMUNITY", # RFC1997
-        9: "ORIGINATOR_ID", # RFC4456
-        10: "CLUSTER_LIST", # RFC4456
-        #11: "DPA", # Deprecated
-        #12: "ADVERTISER", # Deprecated
-        #13: "RCID_PATH/CLUSTER_ID", # Deprecated
-        14: "MP_REACH_NLRI", # RFC4760
-        15: "MP_UNREACH_NLRI", # RFC4760
-        16: "EXTENDED COMMUNITIES", # RFC4360
-        17: "AS4_PATH", # RFC6793
-        18: "AS4_AGGREGATOR", # RFC6793
-        #19: "SAFI Specific Attribute", # Deprecated
-        #20: "Connector Attribute", # Deprecated
-        #21: "AS_PATHLIMIT" # Deprecated
-        22: "PMSI_TUNNEL", # RFC6514
-        23: "Tunnel Encapsulation Attribute", # RFC5512
-        24: "Traffic Engineering", # RFC5543
-        25: "IPv6 Address Specific Extended Community", # RFC5701
-        26: "AIGP", # RFC7311
-        27: "PE Distinguisher Labels", # RFC6514
-        #28: "BGP Entropy Label Capability Attribute", # Deprecated
-        29: "BGP-LS Attribute", # RFC7752
-        32: "LARGE_COMMUNITY", # RFC8092
-        33: "BGPsec_Path", # RFC8205
-        34:"BGP Community Container Attribute", # draft-ietf-idr-wide-bgp-communities
-        35:"Only to Customer", # draft-ietf-idr-bgp-open-policy
-        36:"BGP Domain Path", # draft-ietf-bess-evpn-ipvpn-interworking
-        37: "SFP attribute", # RFC9015
-        38: "BFD Discriminator", # RFC9026
-        40: "BGP Prefix-SID", # RFC8669
-        128: "ATTR_SET", # RFC6368
-        255: "Reserved for development", #RFC2042
+        1: "ORIGIN",  # RFC4271
+        2: "AS_PATH",  # RFC4271
+        3: "NEXT_HOP",  # RFC4271
+        4: "MULTI_EXIT_DISC",  # RFC4271
+        5: "LOCAL_PREF",  # RFC4271
+        6: "ATOMIC_AGGREGATE",  # RFC4271
+        7: "AGGREGATOR",  # RFC4271
+        8: "COMMUNITY",  # RFC1997
+        9: "ORIGINATOR_ID",  # RFC4456
+        10: "CLUSTER_LIST",  # RFC4456
+        # 11: "DPA",  # Deprecated
+        # 12: "ADVERTISER",  # Deprecated
+        # 13: "RCID_PATH/CLUSTER_ID",  # Deprecated
+        14: "MP_REACH_NLRI",  # RFC4760
+        15: "MP_UNREACH_NLRI",  # RFC4760
+        16: "EXTENDED COMMUNITIES",  # RFC4360
+        17: "AS4_PATH",  # RFC6793
+        18: "AS4_AGGREGATOR",  # RFC6793
+        # 19: "SAFI Specific Attribute",  # Deprecated
+        # 20: "Connector Attribute",  # Deprecated
+        # 21: "AS_PATHLIMIT"  # Deprecated
+        22: "PMSI_TUNNEL",  # RFC6514
+        23: "Tunnel Encapsulation Attribute",  # RFC5512
+        24: "Traffic Engineering",  # RFC5543
+        25: "IPv6 Address Specific Extended Community",  # RFC5701
+        26: "AIGP",  # RFC7311
+        27: "PE Distinguisher Labels",  # RFC6514
+        # 28: "BGP Entropy Label Capability Attribute",  # Deprecated
+        29: "BGP-LS Attribute",  # RFC7752
+        32: "LARGE_COMMUNITY",  # RFC8092
+        33: "BGPsec_Path",  # RFC8205
+        34: "BGP Community Container Attribute",  # draft-ietf-idr-wide-bgp-communities
+        35: "Only to Customer",  # draft-ietf-idr-bgp-open-policy
+        36: "BGP Domain Path",  # draft-ietf-bess-evpn-ipvpn-interworking
+        37: "SFP attribute",  # RFC9015
+        38: "BFD Discriminator",  # RFC9026
+        40: "BGP Prefix-SID",  # RFC8669
+        128: "ATTR_SET",  # RFC6368
+        255: "Reserved for development",  # RFC2042
     }
 
     @staticmethod
-    def is_unknown(attr: int = None) -> bool:
+    def is_unknown(attr: int) -> bool:
         """
         Return True is BGP attr ID is a unknown/bogon, else False
         """
         if type(attr) != int:
-            raise TypeError(
-                f"attr is not an int: {type(attr)}"
-            )
+            raise TypeError(f"attr is not an int: {type(attr)}")
 
         if attr in bogon_attr.known_attrs:
             return False
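The deprecated attribute IDs stay commented out of known_attrs, so is_unknown() reports them as bogons along with anything unassigned. A short sketch:

from dnas.bogon_attr import bogon_attr

assert bogon_attr.is_unknown(1) is False   # ORIGIN is registered (RFC4271)
assert bogon_attr.is_unknown(11) is True   # DPA is deprecated, now "unknown"
assert bogon_attr.is_unknown(99) is True   # IDs absent from known_attrs too
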
""" if not subnet: - raise ValueError( - f"Missing required options: subnet={subnet}" - ) + raise ValueError(f"Missing required options: subnet={subnet}") if type(subnet) != str: - raise TypeError( - f"subnet is not a string: {type(subnet)}" - ) + raise TypeError(f"subnet is not a string: {type(subnet)}") ip_net = ipaddress.ip_network(subnet) if type(ip_net) != ipaddress.IPv6Network: diff --git a/dnas/dnas/config.py b/dnas/dnas/config.py index a6f691d..bed5514 100644 --- a/dnas/dnas/config.py +++ b/dnas/dnas/config.py @@ -1,4 +1,5 @@ import os +import typing class config: @@ -23,6 +24,11 @@ class config: TIME_FORMAT = "%Y%m%d.%H%M" DAY_FORMAT = "%Y%m%d" + # JSON indent when exporting MRT entry to JSON + MRT_ENTRY_JSON_INDENT = 2 + # JSON indent when exporting MRT stats to JSON + MRT_STATS_JSON_INDENT = 2 + # Log mode, 'a'ppend or over'w'rite LOG_MODE = "a" # Standard logging format diff --git a/dnas/dnas/git.py b/dnas/dnas/git.py index 58df57c..cd3e32a 100644 --- a/dnas/dnas/git.py +++ b/dnas/dnas/git.py @@ -7,13 +7,14 @@ from dnas.config import config as cfg + class git: """ A class for commiting and pushing files to GitHub. """ @staticmethod - def add(filename: str = None): + def add(filename: str) -> None: """ Add files to the git index, to be commited. """ @@ -23,9 +24,7 @@ def add(filename: str = None): ) if type(filename) != str: - raise TypeError( - f"filename is not a string: {type(filename)}" - ) + raise TypeError(f"filename is not a string: {type(filename)}") ret = subprocess.run( ["git", "add", filename], @@ -42,7 +41,7 @@ def add(filename: str = None): logging.debug(f"Added {filename} to git index") @staticmethod - def clean(): + def clean() -> None: """ Remove any untracked files """ @@ -61,7 +60,7 @@ def clean(): logging.debug(f"Removed untracked fit files in {cfg.GIT_BASE}") @staticmethod - def clear(): + def clear() -> None: """ Remove all files currently in the git index for commit. """ @@ -80,11 +79,11 @@ def clear(): logging.debug(f"Cleared git index in {cfg.GIT_BASE}") @staticmethod - def clone(): + def clone() -> None: """ Clone the DNS Stats repo. """ - os.makedirs(cfg.GIT_BASE, exist_ok = True) + os.makedirs(cfg.GIT_BASE, exist_ok=True) ret = subprocess.run( ["git", "clone", cfg.GIT_STAT_CLONE_URL], @@ -103,20 +102,16 @@ def clone(): f"Cloned git repo {cfg.GIT_STAT_CLONE_URL} to {cfg.BASE_DIR}" ) - @staticmethod - def commit(msg: str = None): + @staticmethod + def commit(msg: str) -> None: """ Commit staged changes to git with commit message "msg". """ if not msg: - raise ValueError( - f"Missing required arguments: msg={msg}." - ) + raise ValueError(f"Missing required arguments: msg={msg}.") if type(msg) != str: - raise TypeError( - f"msg is not a string: {type(msg)}" - ) + raise TypeError(f"msg is not a string: {type(msg)}") ret = subprocess.run( ["git", "commit", "-m", msg], @@ -138,7 +133,7 @@ def commit(msg: str = None): logging.debug(f"Committed to git in {cfg.GIT_BASE}: {msg}") @staticmethod - def diff(): + def diff() -> bool: """ Return True if there are files in the git index, with uncommitted changes, else False. @@ -159,12 +154,11 @@ def diff(): logging.debug(f"No changes staged in git cache in {cfg.GIT_BASE}") return False else: - logging.debug( - f"Changes are staged git in cache in {cfg.GIT_BASE}") + logging.debug(f"Changes are staged git in cache in {cfg.GIT_BASE}") return True @staticmethod - def git_exists(): + def git_exists() -> bool: """ Return True if DNAS Stats repo exists locally. 
""" @@ -185,19 +179,15 @@ def git_exists(): return True @staticmethod - def gen_git_path_ymd(ymd: str = None) -> str: + def gen_git_path_ymd(ymd: str) -> str: """ Generate and return the path to the report files for a specific date. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}." - ) + raise ValueError(f"Missing required arguments: ymd={ymd}.") if type(ymd) != str: - raise TypeError( - f"ymd is not a string: {type(ymd)}" - ) + raise TypeError(f"ymd is not a string: {type(ymd)}") day = datetime.datetime.strptime(ymd, cfg.DAY_FORMAT) @@ -208,19 +198,15 @@ def gen_git_path_ymd(ymd: str = None) -> str: return git_dir @staticmethod - def gen_git_url_ymd(ymd: str = None) -> str: + def gen_git_url_ymd(ymd: str) -> str: """ Generate and return the URL to the report files for a specific date. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}." - ) + raise ValueError(f"Missing required arguments: ymd={ymd}.") if type(ymd) != str: - raise TypeError( - f"ymd is not a string: {type(ymd)}" - ) + raise TypeError(f"ymd is not a string: {type(ymd)}") day = datetime.datetime.strptime(ymd, cfg.DAY_FORMAT) @@ -231,7 +217,7 @@ def gen_git_url_ymd(ymd: str = None) -> str: return git_url @staticmethod - def pull(): + def pull() -> None: """ Perform a git pull to make sure the local repo is up to date """ @@ -248,12 +234,10 @@ def pull(): f"stdout: {ret.stdout.decode()}\n" f"stderr: {ret.stderr.decode()}" ) - logging.debug( - f"Git pull succeeded" - ) + logging.debug(f"Git pull succeeded") @staticmethod - def push(): + def push() -> None: """ Push commits to GitHub. """ diff --git a/dnas/dnas/log.py b/dnas/dnas/log.py index 9e334e6..bf043c9 100644 --- a/dnas/dnas/log.py +++ b/dnas/dnas/log.py @@ -1,17 +1,17 @@ import logging import os +import typing from dnas.config import config as cfg + class log: """ Class to standardise logging across all DNAS scripts. """ @staticmethod - def setup( - debug: bool = False, log_src: str = None, log_path: str = None - ): + def setup(log_src: str, log_path: str, debug: bool = False) -> None: """ Set up logging in a standard format. 
""" @@ -28,8 +28,8 @@ def setup( level=logging.DEBUG, handlers=[ logging.FileHandler(log_path, mode=cfg.LOG_MODE), - logging.StreamHandler() - ] + logging.StreamHandler(), + ], ) else: logging.basicConfig( @@ -37,8 +37,8 @@ def setup( level=logging.INFO, handlers=[ logging.FileHandler(log_path, mode=cfg.LOG_MODE), - logging.StreamHandler() - ] + logging.StreamHandler(), + ], ) logging.info( diff --git a/dnas/dnas/mrt_archive.py b/dnas/dnas/mrt_archive.py index 2e4f198..ae940c8 100644 --- a/dnas/dnas/mrt_archive.py +++ b/dnas/dnas/mrt_archive.py @@ -1,70 +1,52 @@ -from functools import reduce import datetime import os import re -from typing import List -import urllib.parse +import typing from dnas.config import config as cfg -class mrt_archive: +class mrt_archive: def __init__( self, - BASE_URL: str = None, - ENABLED: bool = False, - MRT_DIR: str = None, - MRT_EXT: str = None, - NAME: str = None, - RIB_GLOB: str = None, - RIB_INTERVAL: int = None, - RIB_KEY: str = None, - RIB_OFFSET: int = None, - RIB_PREFIX: str = None, - RIB_URL: str = None, - TYPE: str = None, - UPD_GLOB: str = None, - UPD_INTERVAL: int = None, - UPD_KEY: str = None, - UPD_OFFSET: int = None, - UPD_PREFIX: str = None, - UPD_URL: str = None, - STRIP_COMM: str = "", - - ): - + BASE_URL: str, + ENABLED: bool, + MRT_DIR: str, + MRT_EXT: str, + NAME: str, + RIB_GLOB: str, + RIB_INTERVAL: int, + RIB_KEY: str, + RIB_OFFSET: int, + RIB_PREFIX: str, + RIB_URL: str, + TYPE: str, + UPD_GLOB: str, + UPD_INTERVAL: int, + UPD_KEY: str, + UPD_OFFSET: int, + UPD_PREFIX: str, + UPD_URL: str, + STRIP_COMM: str, + ) -> None: if type(BASE_URL) != str: - raise TypeError( - f"BASE_URL is not of type str: {type(BASE_URL)}" - ) + raise TypeError(f"BASE_URL is not of type str: {type(BASE_URL)}") if type(ENABLED) != bool: - raise TypeError( - f"ENABLED is not of type bool: {type(ENABLED)}" - ) + raise TypeError(f"ENABLED is not of type bool: {type(ENABLED)}") if type(MRT_DIR) != str: - raise TypeError( - f"MRT_DIR is not of type str: {type(MRT_DIR)}" - ) + raise TypeError(f"MRT_DIR is not of type str: {type(MRT_DIR)}") if type(MRT_EXT) != str: - raise TypeError( - f"MRT_EXT is not of type str: {type(MRT_EXT)}" - ) + raise TypeError(f"MRT_EXT is not of type str: {type(MRT_EXT)}") if type(NAME) != str: - raise TypeError( - f"NAME is not of type str: {type(NAME)}" - ) + raise TypeError(f"NAME is not of type str: {type(NAME)}") if type(RIB_GLOB) != str: - raise TypeError( - f"RIB_GLOB is not of type str: {type(RIB_GLOB)}" - ) + raise TypeError(f"RIB_GLOB is not of type str: {type(RIB_GLOB)}") if type(RIB_INTERVAL) != int: raise TypeError( f"RIB_INTERVAL is not of type str: {type(RIB_INTERVAL)}" ) if type(RIB_KEY) != str: - raise TypeError( - f"RIB_KEY is not of type str: {type(RIB_KEY)}" - ) + raise TypeError(f"RIB_KEY is not of type str: {type(RIB_KEY)}") if type(RIB_OFFSET) != int: raise TypeError( f"RIB_OFFSET is not of type int: {type(RIB_OFFSET)}" @@ -74,25 +56,17 @@ def __init__( f"RIB_PREFIX is not of type str: {type(RIB_PREFIX)}" ) if type(RIB_URL) != str: - raise TypeError( - f"RIB_URL is not of type str: {type(RIB_URL)}" - ) + raise TypeError(f"RIB_URL is not of type str: {type(RIB_URL)}") if type(TYPE) != str: - raise TypeError( - f"TYPE is not of type str: {type(TYPE)}" - ) + raise TypeError(f"TYPE is not of type str: {type(TYPE)}") if type(UPD_GLOB) != str: - raise TypeError( - f"UPD_GLOB is not of type str: {type(UPD_GLOB)}" - ) + raise TypeError(f"UPD_GLOB is not of type str: {type(UPD_GLOB)}") if type(UPD_INTERVAL) != int: raise 
diff --git a/dnas/dnas/mrt_archive.py b/dnas/dnas/mrt_archive.py
index 2e4f198..ae940c8 100644
--- a/dnas/dnas/mrt_archive.py
+++ b/dnas/dnas/mrt_archive.py
@@ -1,70 +1,52 @@
-from functools import reduce
 import datetime
 import os
 import re
-from typing import List
-import urllib.parse
+import typing
 
 from dnas.config import config as cfg
 
-class mrt_archive:
 
+class mrt_archive:
     def __init__(
         self,
-        BASE_URL: str = None,
-        ENABLED: bool = False,
-        MRT_DIR: str = None,
-        MRT_EXT: str = None,
-        NAME: str = None,
-        RIB_GLOB: str = None,
-        RIB_INTERVAL: int = None,
-        RIB_KEY: str = None,
-        RIB_OFFSET: int = None,
-        RIB_PREFIX: str = None,
-        RIB_URL: str = None,
-        TYPE: str = None,
-        UPD_GLOB: str = None,
-        UPD_INTERVAL: int = None,
-        UPD_KEY: str = None,
-        UPD_OFFSET: int = None,
-        UPD_PREFIX: str = None,
-        UPD_URL: str = None,
-        STRIP_COMM: str = "",
-
-    ):
-
+        BASE_URL: str,
+        ENABLED: bool,
+        MRT_DIR: str,
+        MRT_EXT: str,
+        NAME: str,
+        RIB_GLOB: str,
+        RIB_INTERVAL: int,
+        RIB_KEY: str,
+        RIB_OFFSET: int,
+        RIB_PREFIX: str,
+        RIB_URL: str,
+        TYPE: str,
+        UPD_GLOB: str,
+        UPD_INTERVAL: int,
+        UPD_KEY: str,
+        UPD_OFFSET: int,
+        UPD_PREFIX: str,
+        UPD_URL: str,
+        STRIP_COMM: str,
+    ) -> None:
         if type(BASE_URL) != str:
-            raise TypeError(
-                f"BASE_URL is not of type str: {type(BASE_URL)}"
-            )
+            raise TypeError(f"BASE_URL is not of type str: {type(BASE_URL)}")
         if type(ENABLED) != bool:
-            raise TypeError(
-                f"ENABLED is not of type bool: {type(ENABLED)}"
-            )
+            raise TypeError(f"ENABLED is not of type bool: {type(ENABLED)}")
         if type(MRT_DIR) != str:
-            raise TypeError(
-                f"MRT_DIR is not of type str: {type(MRT_DIR)}"
-            )
+            raise TypeError(f"MRT_DIR is not of type str: {type(MRT_DIR)}")
         if type(MRT_EXT) != str:
-            raise TypeError(
-                f"MRT_EXT is not of type str: {type(MRT_EXT)}"
-            )
+            raise TypeError(f"MRT_EXT is not of type str: {type(MRT_EXT)}")
         if type(NAME) != str:
-            raise TypeError(
-                f"NAME is not of type str: {type(NAME)}"
-            )
+            raise TypeError(f"NAME is not of type str: {type(NAME)}")
         if type(RIB_GLOB) != str:
-            raise TypeError(
-                f"RIB_GLOB is not of type str: {type(RIB_GLOB)}"
-            )
+            raise TypeError(f"RIB_GLOB is not of type str: {type(RIB_GLOB)}")
         if type(RIB_INTERVAL) != int:
             raise TypeError(
                 f"RIB_INTERVAL is not of type str: {type(RIB_INTERVAL)}"
             )
         if type(RIB_KEY) != str:
-            raise TypeError(
-                f"RIB_KEY is not of type str: {type(RIB_KEY)}"
-            )
+            raise TypeError(f"RIB_KEY is not of type str: {type(RIB_KEY)}")
         if type(RIB_OFFSET) != int:
             raise TypeError(
                 f"RIB_OFFSET is not of type int: {type(RIB_OFFSET)}"
             )
@@ -74,25 +56,17 @@ def __init__(
                 f"RIB_PREFIX is not of type str: {type(RIB_PREFIX)}"
             )
         if type(RIB_URL) != str:
-            raise TypeError(
-                f"RIB_URL is not of type str: {type(RIB_URL)}"
-            )
+            raise TypeError(f"RIB_URL is not of type str: {type(RIB_URL)}")
         if type(TYPE) != str:
-            raise TypeError(
-                f"TYPE is not of type str: {type(TYPE)}"
-            )
+            raise TypeError(f"TYPE is not of type str: {type(TYPE)}")
         if type(UPD_GLOB) != str:
-            raise TypeError(
-                f"UPD_GLOB is not of type str: {type(UPD_GLOB)}"
-            )
+            raise TypeError(f"UPD_GLOB is not of type str: {type(UPD_GLOB)}")
         if type(UPD_INTERVAL) != int:
             raise TypeError(
                 f"UPD_INTERVAL is not of type str: {type(UPD_INTERVAL)}"
             )
         if type(UPD_KEY) != str:
-            raise TypeError(
-                f"UPD_KEY is not of type str: {type(UPD_KEY)}"
-            )
+            raise TypeError(f"UPD_KEY is not of type str: {type(UPD_KEY)}")
         if type(UPD_OFFSET) != int:
             raise TypeError(
                 f"UPD_OFFSET is not of type int: {type(UPD_OFFSET)}"
             )
@@ -102,9 +76,7 @@ def __init__(
                 f"UPD_PREFIX is not of type str: {type(UPD_PREFIX)}"
             )
         if type(UPD_URL) != str:
-            raise TypeError(
-                f"UPD_URL is not of type str: {type(UPD_URL)}"
-            )
+            raise TypeError(f"UPD_URL is not of type str: {type(UPD_URL)}")
         if type(STRIP_COMM) != str:
             raise TypeError(
                 f"STRIP_COMM is not of type str: {type(STRIP_COMM)}"
             )
@@ -131,7 +103,7 @@ def __init__(
         self.STRIP_COMM = STRIP_COMM
 
     @staticmethod
-    def concat_url(url_chunks: List[str] = None) -> str:
+    def concat_url(url_chunks: list[str]) -> str:
         """
         Concatenate a list of strings into a single URL, and return as a
         single string.
@@ -158,7 +130,34 @@ def concat_url(url_chunks: List[str] = None) -> str:
         else:
             return url_chunks[0] + path
 
-    def gen_latest_rib_fn(self) -> str:
+    @staticmethod
+    def from_dict(args: dict) -> "mrt_archive":
+        if not args:
+            raise ValueError(f"args is required")
+
+        return mrt_archive(
+            BASE_URL=args["BASE_URL"],
+            ENABLED=args["ENABLED"],
+            MRT_DIR=args["MRT_DIR"],
+            MRT_EXT=args["MRT_EXT"],
+            NAME=args["NAME"],
+            RIB_GLOB=args["RIB_GLOB"],
+            RIB_INTERVAL=args["RIB_INTERVAL"],
+            RIB_KEY=args["RIB_KEY"],
+            RIB_OFFSET=args["RIB_OFFSET"],
+            RIB_PREFIX=args["RIB_PREFIX"],
+            RIB_URL=args["RIB_URL"],
+            TYPE=args["TYPE"],
+            UPD_GLOB=args["UPD_GLOB"],
+            UPD_INTERVAL=args["UPD_INTERVAL"],
+            UPD_KEY=args["UPD_KEY"],
+            UPD_OFFSET=args["UPD_OFFSET"],
+            UPD_PREFIX=args["UPD_PREFIX"],
+            UPD_URL=args["UPD_URL"],
+            STRIP_COMM=args["STRIP_COMM"],
+        )
+
+    def gen_latest_rib_fn(self: "mrt_archive") -> str:
         """
         Generate and return the filename for the newest/most recent RIB dump
         from this object's archive.
@@ -172,7 +171,7 @@ def gen_latest_rib_fn(self) -> str:
         else:
             raise ValueError(f"Unknown MRT archive type {self.TYPE}")
 
-    def gen_latest_rib_fn_as57355(self) -> str:
+    def gen_latest_rib_fn_as57355(self: "mrt_archive") -> str:
         """
         Generate and return the filename for the newest/most recent RIB dump
         from an AS57355 MRT archive.
@@ -194,21 +193,20 @@ def gen_latest_rib_fn_as57355(self) -> str:
         mod = hours % (self.RIB_INTERVAL // 60)
         if mod == 0:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
             )
         else:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
             )
 
         ymd_hm = datetime.datetime.strftime(
-            datetime.datetime.now() - h_delta,
-            "%Y%m%d.%H00"
+            datetime.datetime.now() - h_delta, "%Y%m%d.%H00"
         )
 
         return self.RIB_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_latest_rib_fn_ripe(self) -> str:
+    def gen_latest_rib_fn_ripe(self: "mrt_archive") -> str:
         """
         Generate and return the filename for the newest/most recent RIB dump
         from a RIPE MRT archive.
@@ -231,21 +229,20 @@ def gen_latest_rib_fn_ripe(self) -> str:
         mod = hours % (self.RIB_INTERVAL // 60)
         if mod == 0:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
             )
         else:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
             )
 
         ymd_hm = datetime.datetime.strftime(
-            datetime.datetime.now() - h_delta,
-            "%Y%m%d.%H00"
+            datetime.datetime.now() - h_delta, "%Y%m%d.%H00"
         )
 
         return self.RIB_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_latest_rib_fn_rv(self) -> str:
+    def gen_latest_rib_fn_rv(self: "mrt_archive") -> str:
         """
         Generate and return the filename for the newest/most recent RIB dump
         from a route-views MRT archive.
@@ -267,21 +264,20 @@ def gen_latest_rib_fn_rv(self) -> str:
         mod = hours % (self.RIB_INTERVAL // 60)
         if mod == 0:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + (self.RIB_OFFSET // 60)
             )
         else:
             h_delta = datetime.timedelta(
-                hours = (self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
+                hours=(self.RIB_INTERVAL // 60) + mod + (self.RIB_OFFSET // 60)
             )
 
         ymd_hm = datetime.datetime.strftime(
-            datetime.datetime.now() - h_delta,
-            "%Y%m%d.%H00"
+            datetime.datetime.now() - h_delta, "%Y%m%d.%H00"
         )
 
         return self.RIB_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_latest_upd_fn(self) -> str:
+    def gen_latest_upd_fn(self: "mrt_archive") -> str:
         """
         Generate and return the filename for the newest/most recent UPDATE
         dump from a this object's archive.
@@ -295,7 +291,7 @@ def gen_latest_upd_fn(self) -> str:
         else:
             raise ValueError(f"Unknown MRT archive type {self.TYPE}")
 
-    def gen_latest_upd_fn_as57355(self) -> str:
+    def gen_latest_upd_fn_as57355(self: "mrt_archive") -> str:
         """
         Generate and return the filename of the newest/most recent UPDATE dump
         for an AS57355 MRT archive.
@@ -320,23 +316,26 @@ def gen_latest_upd_fn_as57355(self) -> str:
         If this machine is in a different timezone to the archive server,
         an additional offset is required, UPD_OFFSET.
         """
-        minutes = int(datetime.datetime.strftime(datetime.datetime.now(), "%M"))
+        minutes = int(
+            datetime.datetime.strftime(datetime.datetime.now(), "%M")
+        )
         mod = minutes % self.UPD_INTERVAL
 
         if mod == 0:
             m_delta = datetime.timedelta(minutes=(self.UPD_INTERVAL * 2))
         else:
-            m_delta = datetime.timedelta(minutes=((self.UPD_INTERVAL * 2) + mod))
+            m_delta = datetime.timedelta(
+                minutes=((self.UPD_INTERVAL * 2) + mod)
+            )
 
         h_delta = datetime.timedelta(minutes=self.UPD_OFFSET)
 
         ymd_hm = datetime.datetime.strftime(
-            datetime.datetime.now() - h_delta - m_delta,
-            cfg.TIME_FORMAT
+            datetime.datetime.now() - h_delta - m_delta, cfg.TIME_FORMAT
         )
 
         return self.UPD_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_latest_upd_fn_ripe(self) -> str:
+    def gen_latest_upd_fn_ripe(self: "mrt_archive") -> str:
         """
         Generate and return the filename of the newest/most recent UPDATE dump
         for a RIPE MRT archive.
@@ -362,21 +361,24 @@ def gen_latest_upd_fn_ripe(self) -> str:
         If this machine is in a different timezone to the archive server,
         an additional offset is required, UPD_OFFSET.
         """
-        minutes = int(datetime.datetime.strftime(datetime.datetime.now(), "%M"))
+        minutes = int(
+            datetime.datetime.strftime(datetime.datetime.now(), "%M")
+        )
         mod = minutes % self.UPD_INTERVAL
 
         if mod == 0:
             m_delta = datetime.timedelta(minutes=(self.UPD_INTERVAL * 2))
         else:
-            m_delta = datetime.timedelta(minutes=((self.UPD_INTERVAL * 2) + mod))
+            m_delta = datetime.timedelta(
+                minutes=((self.UPD_INTERVAL * 2) + mod)
+            )
 
         h_delta = datetime.timedelta(minutes=self.UPD_OFFSET)
 
         ymd_hm = datetime.datetime.strftime(
-            datetime.datetime.now() - h_delta - m_delta,
-            cfg.TIME_FORMAT
+            datetime.datetime.now() - h_delta - m_delta, cfg.TIME_FORMAT
         )
 
         return self.UPD_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_latest_upd_fn_rv(self) -> str:
+    def gen_latest_upd_fn_rv(self: "mrt_archive") -> str:
         """
         Generate and return the filename of the newest/most recent UPDATE dump
         for a route-views MRT archive.
@@ -402,12 +404,16 @@ def gen_latest_upd_fn_rv(self) -> str:
         If this machine is in a different timezone to the archive server,
         an additional offset is required, RV_UPD_OFFSET.
         """
-        minutes = int(datetime.datetime.strftime(datetime.datetime.now(), "%M"))
+        minutes = int(
+            datetime.datetime.strftime(datetime.datetime.now(), "%M")
+        )
         mod = minutes % self.UPD_INTERVAL
 
         if mod == 0:
             m_delta = datetime.timedelta(minutes=(self.UPD_INTERVAL * 2))
         else:
-            m_delta = datetime.timedelta(minutes=((self.UPD_INTERVAL * 2) + mod))
+            m_delta = datetime.timedelta(
+                minutes=((self.UPD_INTERVAL * 2) + mod)
+            )
 
         h_delta = datetime.timedelta(minutes=self.UPD_OFFSET)
 
@@ -416,32 +422,28 @@ def gen_latest_upd_fn_rv(self) -> str:
         )
         return self.UPD_PREFIX + ymd_hm + "." + self.MRT_EXT
 
-    def gen_rib_fn_date(self, ymd_hm: str = None) -> str:
+    def gen_rib_fn_date(self: "mrt_archive", ymd_hm: str) -> str:
         """
         Generate the filename of a RIB MRT file, for the given date and time.
         This function is MRT archive type agnostic.
         """
         if not ymd_hm:
-            raise ValueError(
-                f"Missing required arguments: ymd_hm={ymd_hm}"
-            )
+            raise ValueError(f"Missing required arguments: ymd_hm={ymd_hm}")
 
         mrt_archive.valid_ymd_hm(ymd_hm)
 
         return f"{self.RIB_PREFIX}{ymd_hm}.{self.MRT_EXT}"
 
-    def gen_rib_fns_day(self, ymd: str = None) -> List[str]:
+    def gen_rib_fns_day(self: "mrt_archive", ymd: str) -> list[str]:
         """
         Generate a list of all the RIB MRT filenames for a this MRT archive,
         for a specific day. This function is MRT archive type agnostic.
         """
         if not ymd:
-            raise ValueError(
-                f"Missing required arguments: ymd={ymd}"
-            )
+            raise ValueError(f"Missing required arguments: ymd={ymd}")
 
         mrt_archive.valid_ymd(ymd)
 
         filenames = []
         minutes = 0
-        while(minutes < 1440):
+        while minutes < 1440:
             datetime.timedelta(minutes=minutes)
             hh = f"{minutes//60:02}"
             mm = f"{minutes%60:02}"
@@ -450,13 +452,15 @@ def gen_rib_fns_day(self, ymd: str = None) -> List[str]:
             minutes += self.RIB_INTERVAL
         return filenames
 
-    def gen_rib_fns_range(self, end_date: str = None, start_date: str = None) -> List[str]:
+    def gen_rib_fns_range(
+        self: "mrt_archive", end_date: str, start_date: str
+    ) -> list[str]:
         """
         Generate and return a list of filenames for a range of RIB MRT dumps,
         between the given start and end times inclusive, for the local MRT
         archive type. This function is agnostics of MRT archive type.
         """
-        if (not start_date or not end_date):
+        if not start_date or not end_date:
             raise ValueError(
                 f"Missing required options: start_date={start_date}, "
                 f"end_date={end_date}"
@@ -476,28 +480,26 @@ def gen_rib_fns_range(self, end_date: str = None, start_date: str = None) -> Lis
         for i in range(0, mins + 2):
             delta = datetime.timedelta(minutes=(i * 1))
             ymd_hm = datetime.datetime.strftime(start + delta, cfg.TIME_FORMAT)
-            hm = int(ymd_hm.split(".")[1][:2])*60
+            hm = int(ymd_hm.split(".")[1][:2]) * 60
             hm += int(ymd_hm.split(".")[1][2:])
-            if (hm % self.RIB_INTERVAL == 0):
+            if hm % self.RIB_INTERVAL == 0:
                 filenames.append(self.gen_rib_fn_date(ymd_hm))
 
         return filenames
 
-    def gen_rib_key(self, ymd: str = None) -> str:
+    def gen_rib_key(self: "mrt_archive", ymd: str) -> str:
         """
         Generate the redis DB key used to store RIB stats for this archive,
         on a specific day.
         """
         if not ymd:
-            raise ValueError(
-                f"Missing required arguments: ymd={ymd}"
-            )
+            raise ValueError(f"Missing required arguments: ymd={ymd}")
 
         self.valid_ymd(ymd)
 
         return self.RIB_KEY + ":" + ymd
 
-    def gen_rib_url(self, filename: str = None) -> str:
+    def gen_rib_url(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL for a RIB MRT dump, based on the given MRT file name,
         for the local MRT archive type.
@@ -516,23 +518,21 @@ def gen_rib_url(self, filename: str = None) -> str:
         else:
             raise ValueError(f"Unknown MRT archive type {self.TYPE}")
 
-    def gen_rib_url_as57355(self, filename: str = None) -> str:
+    def gen_rib_url_as57355(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL for a given RIB MRT file, for an AS57355 MRT archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.RIB_PREFIX)] != self.RIB_PREFIX:
+        if filename[0 : len(self.RIB_PREFIX)] != self.RIB_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.RIB_PREFIX)]} "
                 f"is not {self.RIB_PREFIX}"
             )
 
         ym = filename.split(".")[1][0:6]
-        ymd_hm = '.'.join(filename.split(".")[1:3])
+        ymd_hm = ".".join(filename.split(".")[1:3])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -547,13 +547,15 @@ def gen_rib_url_as57355(self, filename: str = None) -> str:
             [self.BASE_URL, "/", self.RIB_URL, "/", filename]
         )
 
-    def gen_rib_url_range(self, end_date: str = None, start_date: str = None) -> List[str]:
+    def gen_rib_url_range(
+        self: "mrt_archive", end_date: str, start_date: str
+    ) -> list[str]:
         """
         Generate and return a list of URLs for a range of RIB MRT dumps,
         between the given start and end times inclusive, for the local MRT
         archive type. This function is agnostic of MRT archive type.
         """
-        if (not start_date or not end_date):
+        if not start_date or not end_date:
             raise ValueError(
                 f"Missing required options: start_date={start_date}, "
                 f"end_date={end_date}"
@@ -567,8 +569,10 @@ def gen_rib_url_range(self, end_date: str = None, start_date: str = None) -> Lis
                 f"End date {end_date} is before start date {start_date}"
             )
 
-        urls: List[str] = []
-        filenames: List[str] = self.gen_rib_fns_range(end_date=end_date, start_date=start_date)
+        urls: list[str] = []
+        filenames: list[str] = self.gen_rib_fns_range(
+            end_date=end_date, start_date=start_date
+        )
 
         if not filenames:
             return urls
@@ -577,23 +581,21 @@ def gen_rib_url_range(self, end_date: str = None, start_date: str = None) -> Lis
 
         return urls
 
-    def gen_rib_url_ripe(self, filename: str = None) -> str:
+    def gen_rib_url_ripe(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL for a given RIB MRT file, for a RIPE MRT archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.RIB_PREFIX)] != self.RIB_PREFIX:
+        if filename[0 : len(self.RIB_PREFIX)] != self.RIB_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.RIB_PREFIX)]} "
                 f"is not {self.RIB_PREFIX}"
             )
 
         ym = filename.split(".")[1][0:6]
-        ymd_hm = '.'.join(filename.split(".")[1:3])
+        ymd_hm = ".".join(filename.split(".")[1:3])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -605,27 +607,31 @@ def gen_rib_url_ripe(self, filename: str = None) -> str:
         )
 
         return mrt_archive.concat_url(
-            [self.BASE_URL, ym[0:4] + "." + ym[4:] + "/", self.RIB_URL, "/", filename]
+            [
+                self.BASE_URL,
+                ym[0:4] + "." + ym[4:] + "/",
+                self.RIB_URL,
+                "/",
+                filename,
+            ]
         )
 
-    def gen_rib_url_rv(self, filename: str = None) -> str:
+    def gen_rib_url_rv(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL for a given RIB MRT file, from a route-views MRT
         archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.RIB_PREFIX)] != self.RIB_PREFIX:
+        if filename[0 : len(self.RIB_PREFIX)] != self.RIB_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.RIB_PREFIX)]} "
                 f"is not {self.RIB_PREFIX}"
             )
 
         ym = filename.split(".")[1][0:6]
-        ymd_hm = '.'.join(filename.split(".")[1:3])
+        ymd_hm = ".".join(filename.split(".")[1:3])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -643,33 +649,29 @@ def gen_rib_url_rv(self, filename: str = None) -> str:
             [self.BASE_URL, y + "." + m + "/", self.RIB_URL, "/", filename]
         )
 
-    def gen_upd_fn_date(self, ymd_hm: str = None) -> str:
+    def gen_upd_fn_date(self: "mrt_archive", ymd_hm: str) -> str:
         """
         Generate the filename of an UPDATE MRT file, for the given date and
         time. This is MRT archive type agnostic.
         """
         if not ymd_hm:
-            raise ValueError(
-                f"Missing required arguments: ymd_hm={ymd_hm}"
-            )
+            raise ValueError(f"Missing required arguments: ymd_hm={ymd_hm}")
 
         mrt_archive.valid_ymd_hm(ymd_hm)
 
         return f"{self.UPD_PREFIX}{ymd_hm}.{self.MRT_EXT}"
 
-    def gen_upd_fns_day(self, ymd: str = None) -> List[str]:
+    def gen_upd_fns_day(self: "mrt_archive", ymd: str) -> list[str]:
         """
         Generate a list of all the UPDATE MRT filename for a specific day,
         for a specific MRT archive. This function is MRT archive type
         agnostic.
         """
         if not ymd:
-            raise ValueError(
-                f"Missing required arguments: ymd={ymd}"
-            )
+            raise ValueError(f"Missing required arguments: ymd={ymd}")
 
         mrt_archive.valid_ymd(ymd)
 
         filenames = []
         minutes = 0
-        while(minutes < 1440):
+        while minutes < 1440:
             datetime.timedelta(minutes=minutes)
             hh = f"{minutes//60:02}"
             mm = f"{minutes%60:02}"
@@ -677,13 +679,15 @@ def gen_upd_fns_day(self, ymd: str = None) -> List[str]:
             minutes += self.UPD_INTERVAL
         return filenames
 
-    def gen_upd_fns_range(self, end_date: str = None, start_date: str = None) -> List[str]:
+    def gen_upd_fns_range(
+        self: "mrt_archive", end_date: str, start_date: str
+    ) -> list[str]:
         """
         Generate and return a list of filenames for a range of UPDATE MRT
         dumps, between the given start and end times inclusive, for the local
         MRT archive type. This function is agnostics of MRT archive type.
         """
-        if (not start_date or not end_date):
+        if not start_date or not end_date:
             raise ValueError(
                 f"Missing required options: start_date={start_date}, "
                 f"end_date={end_date}"
@@ -703,22 +707,20 @@ def gen_upd_fns_range(self, end_date: str = None, start_date: str = None) -> Lis
         for i in range(0, mins + 2):
             delta = datetime.timedelta(minutes=(i * 1))
             ymd_hm = datetime.datetime.strftime(start + delta, cfg.TIME_FORMAT)
-            hm = int(ymd_hm.split(".")[1][:2])*60
+            hm = int(ymd_hm.split(".")[1][:2]) * 60
             hm += int(ymd_hm.split(".")[1][2:])
-            if (hm % self.UPD_INTERVAL == 0):
+            if hm % self.UPD_INTERVAL == 0:
                 filenames.append(self.gen_upd_fn_date(ymd_hm))
 
         return filenames
 
-    def gen_upd_key(self, ymd: str = None) -> str:
+    def gen_upd_key(self: "mrt_archive", ymd: str) -> str:
         """
         Generate the redis DB key used to store update stats for this archive,
         on a specific day.
         """
         if not ymd:
-            raise ValueError(
-                f"Missing required arguments: ymd={ymd}"
-            )
+            raise ValueError(f"Missing required arguments: ymd={ymd}")
 
         if type(ymd) != str:
             raise TypeError(
@@ -729,7 +731,7 @@ def gen_upd_key(self, ymd: str = None) -> str:
 
         return self.UPD_KEY + ":" + ymd
 
-    def gen_upd_url(self, filename: str = None) -> str:
+    def gen_upd_url(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL from a given update MRT file, for a specific MRT
         archive.
@@ -748,24 +750,22 @@ def gen_upd_url(self, filename: str = None) -> str:
         else:
             raise ValueError(f"Unknown MRT archive type {self.TYPE}")
 
-    def gen_upd_url_as57355(self, filename: str = None) -> str:
+    def gen_upd_url_as57355(self: "mrt_archive", filename: str) -> str:
         """
         Return the URL from a given UPDATE MRT filename, from an AS57355 MRT
         archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.UPD_PREFIX)] != self.UPD_PREFIX:
+        if filename[0 : len(self.UPD_PREFIX)] != self.UPD_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.UPD_PREFIX)]} "
                 f"is not {self.UPD_PREFIX}"
             )
 
         ym = filename.split(".")[0][0:6]
-        ymd_hm = '.'.join(filename.split(".")[0:2])
+        ymd_hm = ".".join(filename.split(".")[0:2])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -783,13 +783,15 @@ def gen_upd_url_as57355(self, filename: str = None) -> str:
             [self.BASE_URL, "/", self.UPD_URL, "/", filename]
         )
 
-    def gen_upd_url_range(self, end_date: str = None, start_date: str = None) -> List[str]:
+    def gen_upd_url_range(
+        self: "mrt_archive", end_date: str, start_date: str
+    ) -> list[str]:
         """
         Generate a and return a list of URLs for a range of UPDATE MRT dumps,
         between the given start and end times inclusive, for the local MRT
         archive type. This function is archive type agnostic.
         """
-        if (not start_date or not end_date):
+        if not start_date or not end_date:
             raise ValueError(
                 f"Missing required options: start_date={start_date}, "
                 f"end_date={end_date}"
             )
@@ -803,8 +805,10 @@ def gen_upd_url_range(self, end_date: str = None, start_date: str = None) -> Lis
                 f"End date {end_date} is before start date {start_date}"
             )
 
-        urls: List[str] = []
-        filenames: List[str] = self.gen_upd_fns_range(end_date=end_date, start_date=start_date)
+        urls: list[str] = []
+        filenames: list[str] = self.gen_upd_fns_range(
+            end_date=end_date, start_date=start_date
+        )
 
         if not filenames:
             return urls
@@ -812,25 +816,23 @@ def gen_upd_url_range(self, end_date: str = None, start_date: str = None) -> Lis
             urls.append(self.gen_upd_url(filename))
 
         return urls
-
-    def gen_upd_url_ripe(self, filename: str = None) -> str:
+
+    def gen_upd_url_ripe(self: "mrt_archive", filename: str) -> str:
         """
         Generate the URL from a given UPDATE MRT filename, for a RIPE MRT
         archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.UPD_PREFIX)] != self.UPD_PREFIX:
+        if filename[0 : len(self.UPD_PREFIX)] != self.UPD_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.UPD_PREFIX)]} "
                 f"is not {self.UPD_PREFIX}"
             )
 
         ym = filename.split(".")[1][0:6]
-        ymd_hm = '.'.join(filename.split(".")[1:3])
+        ymd_hm = ".".join(filename.split(".")[1:3])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -842,27 +844,30 @@ def gen_upd_url_ripe(self, filename: str = None) -> str:
         )
 
         return mrt_archive.concat_url(
-            [self.BASE_URL, ym[0:4] + "." + ym[4:] + "/", self.UPD_URL, filename]
+            [
+                self.BASE_URL,
+                ym[0:4] + "." + ym[4:] + "/",
+                self.UPD_URL,
+                filename,
+            ]
         )
 
-    def gen_upd_url_rv(self, filename: str = None) -> str:
+    def gen_upd_url_rv(self: "mrt_archive", filename: str) -> str:
         """
         Return the URL from a given UPDATE MRT filename, from a route-views
         MRT archive.
         """
         if not filename:
-            raise ValueError(
-                f"Missing required options: filename{filename}"
-            )
+            raise ValueError(f"Missing required options: filename{filename}")
 
-        if filename[0:len(self.UPD_PREFIX)] != self.UPD_PREFIX:
+        if filename[0 : len(self.UPD_PREFIX)] != self.UPD_PREFIX:
             raise ValueError(
                 f"MRT file prefix {filename[0:len(self.UPD_PREFIX)]} "
                 f"is not {self.UPD_PREFIX}"
             )
 
         ym = filename.split(".")[1][0:6]
-        ymd_hm = '.'.join(filename.split(".")[1:3])
+        ymd_hm = ".".join(filename.split(".")[1:3])
 
         mrt_archive.valid_ym(ym)
         mrt_archive.valid_ymd_hm(ymd_hm)
@@ -880,7 +885,9 @@ def gen_upd_url_rv(self, filename: str = None) -> str:
             [self.BASE_URL, y + "." + m + "/", self.UPD_URL, filename]
         )
 
-    def ts_from_filename(self, filename: str = None) -> datetime.datetime:
+    def ts_from_filename(
+        self: "mrt_archive", filename: str
+    ) -> datetime.datetime:
         """
         Extract the ymd.hm timestamp from an MRT filename and return it.
         This function is MRT archive type agnostic.
@@ -899,7 +906,7 @@ def ts_from_filename(self, filename: str = None) -> datetime.datetime:
         else:
             raise ValueError(f"Unknown MRT archive type {self.TYPE}")
 
-    def ts_from_filename_as57355(self, filename: str = None) -> datetime.datetime:
+    def ts_from_filename_as57355(
+        self: "mrt_archive", filename: str
+    ) -> datetime.datetime:
         """
         Extract the ymd.hm timestamp from an MRT filename and return it.
         This function is specific to an AS57355 MRT file.
@@ -909,10 +918,12 @@ def ts_from_filename_as57355(self, filename: str = None) -> datetime.datetime:
                 f"Missing required arguments: filename={filename}"
             )
 
-        raw_ts = '.'.join(os.path.basename(filename).split(".")[0:2])
+        raw_ts = ".".join(os.path.basename(filename).split(".")[0:2])
         return datetime.datetime.strptime(raw_ts, cfg.TIME_FORMAT)
 
-    def ts_from_filename_ripe(self, filename: str = None) -> datetime.datetime:
+    def ts_from_filename_ripe(
+        self: "mrt_archive", filename: str
+    ) -> datetime.datetime:
         """
         Extract the ymd.hm timestamp from an MRT filename and return it.
         This function is specific to a RIPE MRT file.
@@ -922,10 +933,12 @@ def ts_from_filename_ripe(self, filename: str = None) -> datetime.datetime:
                 f"Missing required arguments: filename={filename}"
             )
 
-        raw_ts = '.'.join(os.path.basename(filename).split(".")[1:3])
+        raw_ts = ".".join(os.path.basename(filename).split(".")[1:3])
         return datetime.datetime.strptime(raw_ts, cfg.TIME_FORMAT)
 
-    def ts_from_filename_rv(self, filename: str = None) -> datetime.datetime:
+    def ts_from_filename_rv(
+        self: "mrt_archive", filename: str
+    ) -> datetime.datetime:
         """
         Extract the ymd.hm timestamp from an MRT filename and return it.
         This function is specific to a Route-Views MRT file.
@@ -935,19 +948,17 @@ def ts_from_filename_rv(self, filename: str = None) -> datetime.datetime:
                 f"Missing required arguments: filename={filename}"
             )
 
-        raw_ts = '.'.join(os.path.basename(filename).split(".")[1:3])
+        raw_ts = ".".join(os.path.basename(filename).split(".")[1:3])
         return datetime.datetime.strptime(raw_ts, cfg.TIME_FORMAT)
 
     @staticmethod
-    def valid_ym(ym: str = None):
+    def valid_ym(ym: str) -> None:
         """
         Check if the ym string is correctly formated.
         Must be "yyyymm" e.g., "202201".
         """
         if not ym:
-            raise ValueError(
-                f"Missing required arguments: ym={ym}"
-            )
+            raise ValueError(f"Missing required arguments: ym={ym}")
 
         if type(ym) != str:
             raise TypeError(
@@ -958,24 +969,20 @@ def valid_ym(ym: str = None):
         No MRTs available from before 1999, and I assume this conde won't be
         running in 2030, I'm a realist :(
         """
-        if not re.match(
-            r"^(1999|20[0-2][0-9])(0[1-9]|1[0-2])$", ym
-        ):
+        if not re.match(r"^(1999|20[0-2][0-9])(0[1-9]|1[0-2])$", ym):
            raise ValueError(
                 f"Invalid year and month format: {ym}. "
                 "Must be yyyymm e.g., 202201."
             )
 
     @staticmethod
-    def valid_ymd(ymd: str = None):
+    def valid_ymd(ymd: str) -> None:
         """
         Check if the ymd string is correctly formated.
         Must be "yyyymm" e.g., "20220101".
         """
         if not ymd:
-            raise ValueError(
-                f"Missing required arguments: ymd={ymd}"
-            )
+            raise ValueError(f"Missing required arguments: ymd={ymd}")
 
         if type(ymd) != str:
             raise TypeError(
@@ -988,7 +995,7 @@ def valid_ymd(ymd: str = None):
         """
         if not re.match(
             r"^(1999|20[0-2][0-9])(0[1-9]|1[0-2])(0[1-9]|[1-2][0-9]|3[0-1])$",
-            ymd
+            ymd,
         ):
             raise ValueError(
                 f"Invalid year, month, day format: {ymd}. "
@@ -996,15 +1003,13 @@ def valid_ymd(ymd: str = None):
             )
 
     @staticmethod
-    def valid_ymd_hm(ymd_hm: str = None):
+    def valid_ymd_hm(ymd_hm: str) -> None:
         """
         Check if the ymd_hm string is correctly formated.
         Must be "yyyymm" e.g., "20220101.1000".
         """
         if not ymd_hm:
-            raise ValueError(
-                f"Missing required arguments: ymd_hm={ymd_hm}"
-            )
+            raise ValueError(f"Missing required arguments: ymd_hm={ymd_hm}")
 
         if type(ymd_hm) != str:
             raise TypeError(
@@ -1018,15 +1023,18 @@ def valid_ymd_hm(ymd_hm: str = None):
         """
 
         if not re.match(
-            (r"^(1999|20[0-2][0-9])(0[1-9]|1[0-2])(0[1-9]|[1-2][0-9]|3[0-1])\."
-            r"([0-1][0-9]|2[0-3])([0-5][0-9])$"), ymd_hm
+            (
+                r"^(1999|20[0-2][0-9])(0[1-9]|1[0-2])(0[1-9]|[1-2][0-9]|3[0-1])\."
+                r"([0-1][0-9]|2[0-3])([0-5][0-9])$"
+            ),
+            ymd_hm,
         ):
             raise ValueError(
                 f"Invalid year, month, day, hour, minute format: {ymd_hm}. "
                 "Must be yyyymmdd.hhmm e.g., 20220115.1045."
             )
 
-    def ymd_from_file_path(self, file_path: str = None) -> str:
+    def ymd_from_file_path(self: "mrt_archive", file_path: str) -> str:
         """
         Return the ymd from the filename.
         """
         if not file_path:
             raise ValueError(
                 f"Missing required arguments: file_path={file_path}"
             )
 
         if type(file_path) != str:
             raise TypeError(
                 f"file_path is not of type string: {type(file_path)}"
             )
 
-        if (self.TYPE == "RV" or self.TYPE == "RIPE"):
+        if self.TYPE == "RV" or self.TYPE == "RIPE":
             ymd = os.path.basename(file_path).split(".")[1]
         elif self.TYPE == "AS57355":
             ymd = os.path.basename(file_path).split(".")[0]
""" diff --git a/dnas/dnas/mrt_entry.py b/dnas/dnas/mrt_entry.py index f4cfe42..32b1e5c 100644 --- a/dnas/dnas/mrt_entry.py +++ b/dnas/dnas/mrt_entry.py @@ -1,6 +1,6 @@ import datetime import json -from typing import List, Optional, Set +import typing from dnas.config import config as cfg @@ -14,20 +14,19 @@ class mrt_entry: def __init__( self, advt: int = 0, - as_path: List[str] = [], - comm_set: List[str] = [], - filename: str = None, + as_path: list[str] = [], + comm_set: list[str] = [], + filename: str = "", med: int = cfg.MISSING_MED, - next_hop: str = None, - prefix: str = None, - origin_asns: Set[str] = set(), - peer_asn: str = None, - unknown_attrs: Set[int] = set(), - timestamp: str = None, + next_hop: str = "", + prefix: str = "", + origin_asns: set[str] = set(), + peer_asn: str = "", + unknown_attrs: set[int] = set(), + timestamp: str = "", updates: int = 0, withdraws: int = 0, ) -> None: - self.advt = advt self.as_path = as_path self.comm_set = comm_set @@ -42,13 +41,15 @@ def __init__( self.withdraws = withdraws self.unknown_attrs = unknown_attrs - def equal_to(self, mrt_e: 'mrt_entry' = None, meta: bool = False) -> bool: + def equal_to( + self: "mrt_entry", mrt_e: "mrt_entry", meta: bool = False + ) -> bool: """ Return True if this MRT stat entry obj is the same as mrt_e, else False. Comparing meta data like filename and timestamp is option. """ if not mrt_e: - raise ValueError(f"Missing required arguments: mrt_e={mrt_e}") + raise ValueError(f"Missing required arguments: mrt_e") if type(mrt_e) != mrt_entry: raise TypeError(f"mrt_e is not a stats entry: {type(mrt_e)}") @@ -98,14 +99,12 @@ def equal_to(self, mrt_e: 'mrt_entry' = None, meta: bool = False) -> bool: return True - def from_json(self, json_str: str = None): + def from_json(self: "mrt_entry", json_str: str) -> None: """ Parse a JSON str into this MRT stats entry obj. """ if not json_str: - raise ValueError( - f"Missing required arguments: json_str={json_str}" - ) + raise ValueError(f"Missing required arguments: json_str") if type(json_str) != str: raise TypeError(f"json_str is not a string: {type(json_str)}") @@ -114,22 +113,27 @@ def from_json(self, json_str: str = None): self.advt = json_data["advt"] self.as_path = json_data["as_path"] self.comm_set = json_data["comm_set"] + """ + Convert between JSON "null" and empty string "" + In the past we used str or None (which was serialsed as null), + not str only. 
+ """ self.filename = ( - json_data["filename"] if ("filename" in json_data) else None - ) ##### FIX ME - self.med = ( - json_data["med"] if ("med" in json_data) else None - ) ##### FIX ME - self.next_hop = json_data["next_hop"] - self.prefix = json_data["prefix"] + json_data["filename"] if ("filename" in json_data) else "" + ) + self.med = json_data["med"] if ("med" in json_data) else None + self.next_hop = json_data["next_hop"] if json_data["next_hop"] else "" + self.prefix = json_data["prefix"] if json_data["prefix"] else "" self.origin_asns = set(json_data["origin_asns"]) - self.peer_asn = json_data["peer_asn"] + self.peer_asn = json_data["peer_asn"] if json_data["peer_asn"] else "" self.unknown_attrs = ( set(json_data["unknown_attrs"]) if ("unknown_attrs" in json_data) else set() - ) ##### FIX ME - self.timestamp = json_data["timestamp"] + ) + self.timestamp = ( + json_data["timestamp"] if json_data["timestamp"] else "" + ) self.updates = json_data["updates"] self.withdraws = json_data["withdraws"] @@ -140,7 +144,9 @@ def gen_timestamp() -> str: """ return datetime.datetime.now().strftime(cfg.TIME_FORMAT) - def to_json(self, indent: Optional[int] = None) -> str: + def to_json( + self: "mrt_entry", indent: int = cfg.MRT_ENTRY_JSON_INDENT + ) -> str: """ Return this MRT entry obj serialised to a JSON str. """ @@ -161,7 +167,7 @@ def to_json(self, indent: Optional[int] = None) -> str: } return json.dumps(json_data, indent=indent) - def print(self): + def print(self: "mrt_entry") -> None: """ Ugly print this MRT stats entry. """ diff --git a/dnas/dnas/mrt_getter.py b/dnas/dnas/mrt_getter.py index 82a552d..d1c8b9a 100644 --- a/dnas/dnas/mrt_getter.py +++ b/dnas/dnas/mrt_getter.py @@ -1,12 +1,13 @@ import datetime import logging import os -import requests -from typing import List, Literal, Tuple, Union +from typing import Literal, Tuple, Union +import requests from dnas.config import config as cfg from dnas.mrt_archive import mrt_archive + class mrt_getter: """ Class which can be used to get MRT files from public MRT archives. @@ -14,21 +15,17 @@ class mrt_getter: @staticmethod def get_latest_rib( - arch: 'mrt_archive' = None, + arch: "mrt_archive", replace: bool = False, ) -> Tuple[str, str]: """ Download the lastest RIB dump MRT from the given MRT archive. """ if not arch: - raise ValueError( - f"Missing required options: arch={arch}" - ) + raise ValueError(f"Missing required options: arch={arch}") if type(arch) != mrt_archive: - raise TypeError( - f"arch is not an MRT archive: {type(arch)}" - ) + raise TypeError(f"arch is not an MRT archive: {type(arch)}") filename = arch.gen_latest_rib_fn() url = arch.gen_rib_url(filename) @@ -38,21 +35,17 @@ def get_latest_rib( @staticmethod def get_latest_upd( - arch: 'mrt_archive' = None, + arch: "mrt_archive", replace: bool = False, ) -> Tuple[str, str]: """ Download the lastest update MRT file from the given MRT archive. 
""" if not arch: - raise ValueError( - f"Missing required options: arch={arch}" - ) + raise ValueError(f"Missing required options: arch={arch}") if type(arch) != mrt_archive: - raise TypeError( - f"arch is not an MRT archive: {type(arch)}" - ) + raise TypeError(f"arch is not an MRT archive: {type(arch)}") filename = arch.gen_latest_upd_fn() url = arch.gen_upd_url(filename) @@ -62,11 +55,11 @@ def get_latest_upd( @staticmethod def get_range_rib( - arch: 'mrt_archive' = None, - end_date: str = None, + arch: "mrt_archive", + end_date: str, + start_date: str, replace: bool = False, - start_date: str = None, - ) -> List[Tuple[str, str]]: + ) -> list[Tuple[str, str]]: """ Download a range of RIB MRT dump files from an archive. All RIB MRT files from and inclusive of start_date to and inclusive @@ -75,16 +68,14 @@ def get_range_rib( start_date: In the MRT date format yyyymmdd.hhmm "20220129.0000" end_date: In the MRT date format yyyymmdd.hhmm "20220129.1230" """ - if (not arch or not start_date or not end_date): + if not arch or not start_date or not end_date: raise ValueError( f"Missing required options: arch={arch}, " f"start_date={start_date}, end_date={end_date}" ) if type(arch) != mrt_archive: - raise TypeError( - f"arch is not an MRT archive: {type(arch)}" - ) + raise TypeError(f"arch is not an MRT archive: {type(arch)}") start = datetime.datetime.strptime(start_date, cfg.TIME_FORMAT) end = datetime.datetime.strptime(end_date, cfg.TIME_FORMAT) @@ -100,7 +91,9 @@ def get_range_rib( for filename in filenames: url = arch.gen_rib_url(filename) - outfile = os.path.normpath(arch.MRT_DIR + "/" + os.path.basename(url)) + outfile = os.path.normpath( + arch.MRT_DIR + "/" + os.path.basename(url) + ) if mrt_getter.download_mrt( filename=outfile, replace=replace, url=url @@ -112,11 +105,11 @@ def get_range_rib( @staticmethod def get_range_upd( - arch: 'mrt_archive' = None, - end_date: str = None, + arch: "mrt_archive", + end_date: str, + start_date: str, replace: bool = False, - start_date: str = None, - ) -> List[Tuple[str, str]]: + ) -> list[Tuple[str, str]]: """ Download a range of MRT update dump files from an MRT archive. All update MRT files from and inclusive of start_date to and inclusive @@ -126,16 +119,14 @@ def get_range_upd( end_date: In the MRT date format yyyymmdd.hhmm "20220129.1230" """ - if (not arch or not start_date or not end_date): + if not arch or not start_date or not end_date: raise ValueError( f"Missing required options: arch={arch}, " f"start_date={start_date}, end_date={end_date}" ) if type(arch) != mrt_archive: - raise TypeError( - f"arch is not an MRT archive: {type(arch)}" - ) + raise TypeError(f"arch is not an MRT archive: {type(arch)}") start = datetime.datetime.strptime(start_date, cfg.TIME_FORMAT) end = datetime.datetime.strptime(end_date, cfg.TIME_FORMAT) @@ -151,7 +142,9 @@ def get_range_upd( for filename in filenames: url = arch.gen_upd_url(filename) - outfile = os.path.normpath(arch.MRT_DIR + "/" + os.path.basename(url)) + outfile = os.path.normpath( + arch.MRT_DIR + "/" + os.path.basename(url) + ) if mrt_getter.download_mrt( filename=outfile, replace=replace, url=url @@ -162,7 +155,9 @@ def get_range_upd( return downloaded @staticmethod - def download_mrt(filename: str = None, replace: bool = False, url: str = None) -> Union[str, Literal[False]]: + def download_mrt( + filename: str, url: str, replace: bool = False + ) -> Union[str, Literal[False]]: """ Download an MRT file from the given url, and save it as the given filename. 
@@ -175,7 +170,7 @@ def download_mrt(filename: str = None, replace: bool = False, url: str = None) - os.makedirs(os.path.dirname(filename), exist_ok=True) - if (not replace and os.path.exists(filename)): + if not replace and os.path.exists(filename): logging.info(f"Not overwriting existing file {filename}") return False @@ -191,7 +186,6 @@ def download_mrt(filename: str = None, replace: bool = False, url: str = None) - except requests.exceptions.ConnectionError as e: logging.info(f"Couldn't connect to MRT server: {e}") raise requests.exceptions.ConnectionError - if req.status_code != 200: logging.info(f"HTTP error: {req.status_code}") @@ -200,7 +194,7 @@ def download_mrt(filename: str = None, replace: bool = False, url: str = None) - logging.error(req.content) req.raise_for_status() - file_len = int(req.headers['Content-length']) + file_len = int(req.headers["Content-length"]) if file_len is None or file_len == 0: logging.error(req.url) @@ -227,9 +221,13 @@ def download_mrt(filename: str = None, replace: bool = False, url: str = None) - f.flush() if rcvd == file_len: - logging.debug(f"Downloaded {rcvd}/{file_len} ({(rcvd/file_len)*100}%)") - elif ((rcvd/file_len)*100)//10 > progress: - logging.debug(f"Downloaded {rcvd}/{file_len} ({(rcvd/file_len)*100:.3}%)") - progress = ((rcvd/file_len)*100)//10 + logging.debug( + f"Downloaded {rcvd}/{file_len} ({(rcvd/file_len)*100}%)" + ) + elif ((rcvd / file_len) * 100) // 10 > progress: + logging.debug( + f"Downloaded {rcvd}/{file_len} ({(rcvd/file_len)*100:.3}%)" + ) + progress = ((rcvd / file_len) * 100) // 10 return filename diff --git a/dnas/dnas/mrt_parser.py b/dnas/dnas/mrt_parser.py index 586e291..ab4359c 100644 --- a/dnas/dnas/mrt_parser.py +++ b/dnas/dnas/mrt_parser.py @@ -1,16 +1,16 @@ import datetime import errno import logging -import mrtparse # type: ignore import operator import os import traceback -from typing import Dict, List, Set +import typing -from dnas.config import config as cfg +import mrtparse # type: ignore from dnas.bogon_asn import bogon_asn from dnas.bogon_attr import bogon_attr from dnas.bogon_ip import bogon_ip +from dnas.config import config as cfg from dnas.mrt_archives import mrt_archives from dnas.mrt_entry import mrt_entry from dnas.mrt_stats import mrt_stats @@ -22,7 +22,7 @@ class mrt_parser: """ @staticmethod - def get_timestamp(filename: str = None) -> str: + def get_timestamp(filename: str) -> str: """ Return the timestamp from the start of an MRT file. """ @@ -48,7 +48,7 @@ def get_timestamp(filename: str = None) -> str: return timestamp @staticmethod - def posix_to_ts(posix: int = None) -> str: + def posix_to_ts(posix: int) -> str: """ Convert the posix timestamp in an MRT dump, to the UTC time in the standard format of MRTs. @@ -61,7 +61,7 @@ def posix_to_ts(posix: int = None) -> str: ) @staticmethod - def parse_rib_dump(filename: str = None) -> 'mrt_stats': + def parse_rib_dump(filename: str) -> "mrt_stats": """ Take filename of RIB dump MRT as input and return an MRT stats obj. """ @@ -83,7 +83,7 @@ def parse_rib_dump(filename: str = None) -> 'mrt_stats': return mrt_s @staticmethod - def parse_upd_dump(filename: str = None) -> 'mrt_stats': + def parse_upd_dump(filename: str) -> "mrt_stats": """ Take filename of UPDATE dump MRT as input and return an MRT stats obj. 
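Since get_range_rib()/get_range_upd() moved start_date ahead of replace and dropped the None defaults, positional calls written against the old order would now break; calling with keywords sidesteps that (dates are illustrative):

from dnas.mrt_archives import mrt_archives
from dnas.mrt_getter import mrt_getter

mrt_a = mrt_archives()
for arch in mrt_a.archives:
    if not arch.ENABLED:
        continue
    mrt_getter.get_range_upd(
        arch=arch,
        start_date="20220129.0000",
        end_date="20220129.1230",
        replace=False,
    )
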
""" @@ -99,24 +99,24 @@ def parse_upd_dump(filename: str = None) -> 'mrt_stats': We will see the same data again and again, so cache "seen" data to speed up parsing """ - non_bogon_asns: Dict[str, None] = {} - bogon_origin_asns: List[mrt_entry] = [] - bogon_prefix_entries: List[mrt_entry] = [] - highest_med_prefixes: List[mrt_entry] = [] - invalid_len_entries: List[mrt_entry] = [] - longest_as_path: List[mrt_entry] = [] - longest_comm_set: List[mrt_entry] = [] - most_bogon_asns: Dict[str, set] = {} - most_unknown_attrs: List[mrt_entry] = [] - origin_asns_prefix: Dict[str, set] = {} - upd_prefix: Dict[str, dict] = {} - advt_per_origin_asn: Dict[str, int] = {} - upd_peer_asn: Dict[str, dict] = {} + non_bogon_asns: dict[str, None] = {} + bogon_origin_asns: list[mrt_entry] = [] + bogon_prefix_entries: list[mrt_entry] = [] + highest_med_prefixes: list[mrt_entry] = [] + invalid_len_entries: list[mrt_entry] = [] + longest_as_path: list[mrt_entry] = [] + longest_comm_set: list[mrt_entry] = [] + most_bogon_asns: dict[str, set] = {} + most_unknown_attrs: list[mrt_entry] = [] + origin_asns_prefix: dict[str, set] = {} + upd_prefix: dict[str, dict] = {} + advt_per_origin_asn: dict[str, int] = {} + upd_peer_asn: dict[str, dict] = {} # If parsing a chunk of an MRT file, try to work out the orig filename orig_filename = "" if cfg.SPLIT_DIR: - orig_filename = '_'.join(filename.split("_")[:-1]) + orig_filename = "_".join(filename.split("_")[:-1]) if not os.path.isfile(orig_filename): orig_filename = filename if not orig_filename: @@ -142,7 +142,6 @@ def parse_upd_dump(filename: str = None) -> 'mrt_stats': # Sometimes the MRT files contain corrupt BGP UPDATES try: for idx, mrt_e in enumerate(mrt_entries): - """ Some RIPE UPDATE MRTs contain the BGP state change events, whereas Route-Views don't. @@ -177,12 +176,12 @@ def parse_upd_dump(filename: str = None) -> 'mrt_stats': next(iter(mrt_e.data["timestamp"].items()))[0] ) # E.g., 1486801684 - bogon_prefixes: List[str] = [] - comm_set: List[str] = [] - invalid_len: List[str] = [] + bogon_prefixes: list[str] = [] + comm_set: list[str] = [] + invalid_len: list[str] = [] med = cfg.MISSING_MED - prefixes: List[str] = [] - unknown_attrs: Set[int] = set() + prefixes: list[str] = [] + unknown_attrs: set[int] = set() peer_asn = mrt_e.data["peer_as"] if peer_asn not in upd_peer_asn: @@ -995,7 +994,7 @@ def parse_upd_dump(filename: str = None) -> 'mrt_stats': return mrt_s @staticmethod - def mrt_count(filename: str = None) -> int: + def mrt_count(filename: str) -> int: """ Return the total number of MRT records in an MRT file. """ diff --git a/dnas/dnas/mrt_splitter.py b/dnas/dnas/mrt_splitter.py index 4fe9fe8..5922443 100644 --- a/dnas/dnas/mrt_splitter.py +++ b/dnas/dnas/mrt_splitter.py @@ -3,40 +3,40 @@ import gzip import logging import os -from typing import Any, List, NoReturn, Tuple +from io import BufferedReader +from typing import NoReturn, Tuple, Union from dnas.mrt_archives import mrt_archives + class MrtFormatError(Exception): """ Exception for invalid MRT formatted data. """ - def __init__(self, message: str = ""): + def __init__(self: "MrtFormatError", message: str = "") -> None: Exception.__init__(self) self.message = message - def __str__(self) -> str: + def __str__(self: "MrtFormatError") -> str: if self.message: return self.message else: return "MrtFormatError" + class mrt_splitter: """ Splitter for MRT files. Copy-pasta of the original mrtparer lib to split an MRT file into N files. 
""" - def __init__(self, filename: str = None) -> None: - + def __init__(self: "mrt_splitter", filename: str) -> None: if not filename: raise ValueError("MRT filename missing") if type(filename) != str: - raise TypeError( - f"filename is not a string: {type(filename)}" - ) + raise TypeError(f"filename is not a string: {type(filename)}") if not os.path.isfile(filename): raise FileNotFoundError( @@ -44,38 +44,38 @@ def __init__(self, filename: str = None) -> None: ) self.data: bytearray - self.f: Any + self.f: Union[bz2.BZ2File, gzip.GzipFile, BufferedReader] self.filename = filename # Magic Number - GZIP_MAGIC = b'\x1f\x8b' - BZ2_MAGIC = b'\x42\x5a\x68' + GZIP_MAGIC = b"\x1f\x8b" + BZ2_MAGIC = b"\x42\x5a\x68" - f = open(filename, 'rb') + f = open(filename, "rb") hdr = f.read(max(len(BZ2_MAGIC), len(GZIP_MAGIC))) f.close() if hdr.startswith(BZ2_MAGIC): - self.f = bz2.BZ2File(filename, 'rb') + self.f = bz2.BZ2File(filename, "rb") logging.debug("Assuming BZ2 file") elif hdr.startswith(GZIP_MAGIC): - self.f = gzip.GzipFile(filename, 'rb') + self.f = gzip.GzipFile(filename, "rb") logging.debug("Assuming GZIP file") else: - self.f = open(filename, 'rb') + self.f = open(filename, "rb") logging.debug("Assuming binary file") - def close(self) -> NoReturn: + def close(self: "mrt_splitter") -> NoReturn: """ Close the open MRT file. """ self.f.close() raise StopIteration - def __iter__(self) -> 'mrt_splitter': + def __iter__(self: "mrt_splitter") -> "mrt_splitter": return self - def __next__(self) -> 'mrt_splitter': + def __next__(self: "mrt_splitter") -> "mrt_splitter": """ Move to the next entry in the MRT file. """ @@ -102,7 +102,9 @@ def __next__(self) -> 'mrt_splitter': return self - def split(self, no_chunks: int = None, outdir: str = None) -> Tuple[int, List[str]]: + def split( + self: "mrt_splitter", no_chunks: int, outdir: str + ) -> Tuple[int, list[str]]: """ Split the MRT data into N equal sized chunks written to disk. Return the total number of MRT entries and the list of chunk filenames. @@ -110,9 +112,7 @@ def split(self, no_chunks: int = None, outdir: str = None) -> Tuple[int, List[st if not self.f: raise AttributeError("No MRT file is currently open") - if (not no_chunks or - not isinstance(no_chunks, int) or - no_chunks < 1): + if not no_chunks or not isinstance(no_chunks, int) or no_chunks < 1: raise ValueError( f"Number of chunks to split MRT file into must be a positive " f"integer, not {no_chunks}" @@ -133,8 +133,7 @@ def split(self, no_chunks: int = None, outdir: str = None) -> Tuple[int, List[st chunk_name = self.filename + "_" + str(i) chunk_filenames.append(chunk_name) chunk_outpath = os.path.join( - outdir, - os.path.basename(self.filename) + "_" + str(i) + outdir, os.path.basename(self.filename) + "_" + str(i) ) logging.debug(f"Opening {chunk_outpath} for output") f = open(chunk_outpath, "wb") diff --git a/dnas/dnas/mrt_stats.py b/dnas/dnas/mrt_stats.py index 4ee2708..f7ef90d 100644 --- a/dnas/dnas/mrt_stats.py +++ b/dnas/dnas/mrt_stats.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Dict, List, Set +import typing from dnas.config import config as cfg from dnas.mrt_archive import mrt_archive @@ -13,33 +13,33 @@ class mrt_stats: This stores the starts from a parsed data source (i.e. a BGP MRT dump). 
""" - def __init__(self) -> None: - self.archive_list: Set[ + def __init__(self: "mrt_stats") -> None: + self.archive_list: set[ str ] = set() # Archives from which this stats object was populated - self.bogon_origin_asns: List[mrt_entry] = [] - self.bogon_prefixes: List[mrt_entry] = [] - self.highest_med_prefixes: List[mrt_entry] = [] - self.invalid_len: List[mrt_entry] = [] - self.longest_as_path: List[mrt_entry] = [] - self.longest_comm_set: List[mrt_entry] = [] - self.most_advt_prefixes: List[mrt_entry] = [] - self.most_bogon_asns: List[mrt_entry] = [] - self.most_upd_prefixes: List[mrt_entry] = [] - self.most_withd_prefixes: List[mrt_entry] = [] - self.most_advt_origin_asn: List[mrt_entry] = [] - self.most_advt_peer_asn: List[mrt_entry] = [] - self.most_upd_peer_asn: List[mrt_entry] = [] - self.most_withd_peer_asn: List[mrt_entry] = [] - self.most_origin_asns: List[mrt_entry] = [] - self.most_unknown_attrs: List[mrt_entry] = [] - self.file_list: List[str] = [] + self.bogon_origin_asns: list[mrt_entry] = [] + self.bogon_prefixes: list[mrt_entry] = [] + self.highest_med_prefixes: list[mrt_entry] = [] + self.invalid_len: list[mrt_entry] = [] + self.longest_as_path: list[mrt_entry] = [] + self.longest_comm_set: list[mrt_entry] = [] + self.most_advt_prefixes: list[mrt_entry] = [] + self.most_bogon_asns: list[mrt_entry] = [] + self.most_upd_prefixes: list[mrt_entry] = [] + self.most_withd_prefixes: list[mrt_entry] = [] + self.most_advt_origin_asn: list[mrt_entry] = [] + self.most_advt_peer_asn: list[mrt_entry] = [] + self.most_upd_peer_asn: list[mrt_entry] = [] + self.most_withd_peer_asn: list[mrt_entry] = [] + self.most_origin_asns: list[mrt_entry] = [] + self.most_unknown_attrs: list[mrt_entry] = [] + self.file_list: list[str] = [] self.timestamp: str = "" self.total_upd: int = 0 # All updates received/parsed self.total_advt: int = 0 # Updates signalling prefix avertisement self.total_withd: int = 0 # Updates signalling prefix withdrawal - def add(self, merge_data: 'mrt_stats' = None) -> bool: + def add(self: "mrt_stats", merge_data: "mrt_stats") -> bool: """ This function adds another MRT stats object into this one. This means that values which are equal in both objects are added and @@ -784,7 +784,7 @@ def add(self, merge_data: 'mrt_stats' = None) -> bool: # Most origin ASNs per prefix tmp = [] - self_prefixes: Dict[str, None] = {} + self_prefixes: dict[str, None] = {} for mrt_e in self.most_origin_asns: self_prefixes[mrt_e.prefix] = None # ^ This is a hack to speed up this section up: @@ -934,13 +934,17 @@ def add(self, merge_data: 'mrt_stats' = None) -> bool: return changed - def add_archive(self, name: str = None): + def add_archive(self: "mrt_stats", name: str) -> None: """ Add the name of an MRT archive to the list if it isn't already present. """ + if not name: + raise ValueError(f"name is required for") self.archive_list.add(name) - def equal_to(self, mrt_s: 'mrt_stats' = None, meta: bool = False) -> bool: + def equal_to( + self: "mrt_stats", mrt_s: "mrt_stats", meta: bool = False + ) -> bool: """ Return True if this MRT stats obj is the same as mrt_s, else False. Comparing meta data like file list or timestamp is optional. @@ -1145,7 +1149,7 @@ def equal_to(self, mrt_s: 'mrt_stats' = None, meta: bool = False) -> bool: return True - def from_file(self, filename: str = None): + def from_file(self: "mrt_stats", filename: str) -> None: """ Load and parse MRT stats obj from a JSON text file. 
""" @@ -1158,7 +1162,7 @@ def from_file(self, filename: str = None): with open(filename, "r") as f: self.from_json(f.read()) - def from_json(self, json_str: str = None): + def from_json(self: "mrt_stats", json_str: str) -> None: """ Parse the JSON string as MRT stats data. To provide backward connectivity with old data in Redis, need to check @@ -1308,7 +1312,7 @@ def from_json(self, json_str: str = None): self.total_withd = int(json_dict["total_withd"]) @staticmethod - def gen_ts_from_ymd(ymd: str = None) -> str: + def gen_ts_from_ymd(ymd: str) -> str: """ Generate and return the timestamp for a specific day, for use when creating an mrt_stats objects which contains the summary data for a @@ -1327,7 +1331,7 @@ def gen_ts_from_ymd(ymd: str = None) -> str: ) @staticmethod - def gen_daily_key(ymd: str = None) -> str: + def gen_daily_key(ymd: str) -> str: """ Generate the redis key used to store the global stats obj for a specific day. @@ -1343,7 +1347,7 @@ def gen_daily_key(ymd: str = None) -> str: return "DAILY:" + ymd @staticmethod - def gen_diff_key(ymd: str = None) -> str: + def gen_diff_key(ymd: str) -> str: """ Generate the redis key used to store the diff stats obj for a specific day. @@ -1365,7 +1369,7 @@ def gen_global_key() -> str: """ return "GLOBAL" - def get_diff(self, mrt_s: 'mrt_stats' = None) -> 'mrt_stats': + def get_diff(self: "mrt_stats", mrt_s: "mrt_stats") -> "mrt_stats": """ Generate an mrt_stats obj with entries unique to mrt_s. Don't diff meta data like timestamp or file list. @@ -1558,7 +1562,7 @@ def get_diff(self, mrt_s: 'mrt_stats' = None) -> 'mrt_stats': return diff - def get_diff_larger(self, mrt_s: 'mrt_stats' = None) -> 'mrt_stats': + def get_diff_larger(self: "mrt_stats", mrt_s: "mrt_stats") -> "mrt_stats": """ Generate an mrt_stats obj with entries which are unique to mrt_s, and are larger than the equivilent values in this obj. For example, only @@ -1890,7 +1894,7 @@ def get_diff_larger(self, mrt_s: 'mrt_stats' = None) -> 'mrt_stats': return diff @staticmethod - def gen_prev_daily_key(ymd: str = None) -> str: + def gen_prev_daily_key(ymd: str) -> str: """ Generate the redis key used to store the global stats obj for the day before a specific day. @@ -1909,7 +1913,7 @@ def gen_prev_daily_key(ymd: str = None) -> str: cfg.DAY_FORMAT, ) - def is_empty(self) -> bool: + def is_empty(self: "mrt_stats") -> bool: """ Check if an mrt_stats object is empty. Don't check meta data like file list or timestamp. @@ -1941,7 +1945,7 @@ def is_empty(self) -> bool: else: return False - def merge(self, merge_data: 'mrt_stats' = None) -> bool: + def merge(self: "mrt_stats", merge_data: "mrt_stats") -> bool: """ This functions takes the bigger stat from the local object and merge_data object, and stores the bigger of the two back in this object. @@ -2453,14 +2457,16 @@ def merge(self, merge_data: 'mrt_stats' = None) -> bool: return changed - def merge_archives(self, mrt_s: 'mrt_stats' = None): + def merge_archives(self: "mrt_stats", mrt_s: "mrt_stats"): """ Add MRT archive names from mrt_s to this stats object, only if they are missing. """ + if not mrt_s: + raise ValueError("mrt_s is required") self.archive_list.update(mrt_s.archive_list) - def print(self): + def print(self: "mrt_stats") -> None: """ Ugly print the stats in this obj. 
""" @@ -2751,7 +2757,7 @@ def print(self): if self.timestamp: print(f"timestamp: {self.timestamp}") - def to_file(self, filename: str = None): + def to_file(self: "mrt_stats", filename: str) -> None: """ Serialise the MRT stats obj to JSON, save JSON as text file. """ @@ -2764,7 +2770,9 @@ def to_file(self, filename: str = None): with open(filename, "w") as f: f.write(self.to_json()) - def to_json(self) -> str: + def to_json( + self: "mrt_stats", indent: int = cfg.MRT_STATS_JSON_INDENT + ) -> str: """ Serialise the MRT stats obj to JSON, and returns the JSON string. """ @@ -2822,9 +2830,9 @@ def to_json(self) -> str: "file_list": self.file_list, "timestamp": self.timestamp, } - return json.dumps(json_data, indent=2) + return json.dumps(json_data, indent=indent) - def ts_ymd(self) -> str: + def ts_ymd(self: "mrt_stats") -> str: """ Return only the YMD from this obj's timestamp raw e.g. YYYYMMDD """ @@ -2833,7 +2841,7 @@ def ts_ymd(self) -> str: return self.timestamp.split(".")[0] - def ts_ymd_format(self) -> str: + def ts_ymd_format(self: "mrt_stats") -> str: """ Return only the YMD from this obj's timestamp formatted e.g. YYYY/MM/DD """ diff --git a/dnas/dnas/redis_db.py b/dnas/dnas/redis_db.py index 57d6c65..c85b06e 100644 --- a/dnas/dnas/redis_db.py +++ b/dnas/dnas/redis_db.py @@ -1,24 +1,25 @@ import json -import redis -from typing import Any, Dict, List, Union +from typing import Union -from dnas.redis_auth import redis_auth # type: ignore from dnas.mrt_stats import mrt_stats +from dnas.redis_auth import redis_auth # type: ignore from dnas.twitter_msg import twitter_msg +from redis.client import Redis -class redis_db(): + +class redis_db: """ Class to manage connection to Redis DB and martial data in and out. """ - def __init__(self) -> None: - self.r = redis.Redis( + def __init__(self: "redis_db") -> None: + self.r = Redis( host=redis_auth.host, port=redis_auth.port, password=redis_auth.password, ) - def add_to_queue(self, key: str = None, json_str: str = None): + def add_to_queue(self: "redis_db", key: str, json_str: str) -> None: """ Push to a list a strings. For example, a Tweet serialised to a JSON string. @@ -29,19 +30,17 @@ def add_to_queue(self, key: str = None, json_str: str = None): ) if type(json_str) != str: - raise TypeError( - f"json_str is not a string: {type(json_str)}" - ) + raise TypeError(f"json_str is not a string: {type(json_str)}") self.r.lpush(key, json_str) - def close(self): + def close(self: "redis_db") -> None: """ Close the redis connection. """ self.r.close() - def del_from_queue(self, key: str = None, elem: str = None): + def del_from_queue(self: "redis_db", key: str, elem: str) -> None: """ Delete an entry from a list of strings. """ @@ -51,24 +50,20 @@ def del_from_queue(self, key: str = None, elem: str = None): ) if type(elem) != str: - raise TypeError( - f"elem is not a string: {type(elem)}" - ) + raise TypeError(f"elem is not a string: {type(elem)}") self.r.lrem(key, 0, elem) - def delete(self, key: str = None) -> int: + def delete(self: "redis_db", key: str) -> int: """ Delete key entry in Redis. """ if not key: - raise ValueError( - f"Missing required arguments: key={key}" - ) + raise ValueError(f"Missing required arguments: key={key}") return self.r.delete(key) - def from_file(self, filename: str = None): + def from_file(self: "redis_db", filename: str) -> None: """ Restore redis DB from JSON file. 
""" @@ -80,7 +75,7 @@ def from_file(self, filename: str = None): with open(filename, "r") as f: self.from_json(f.read()) - def from_json(self, json_str: str = None): + def from_json(self: "redis_db", json_str: str): """ Restore redis DB from a JSON string """ @@ -93,14 +88,12 @@ def from_json(self, json_str: str = None): for k in json_dict.keys(): self.r.set(k, json_dict[k]) - def get(self, key: str = None) -> Union[Any, List[Any]]: + def get(self: "redis_db", key: str) -> Union[str, list]: """ Return the value stored in "key" from Redis """ if not key: - raise ValueError( - f"Missing required arguments: key={key}" - ) + raise ValueError(f"Missing required arguments: key={key}") t = self.r.type(key).decode("utf-8") if t == "string": @@ -114,29 +107,23 @@ def get(self, key: str = None) -> Union[Any, List[Any]]: elif t == "list": return [x.decode("utf-8") for x in self.r.lrange(key, 0, -1)] else: - raise TypeError( - f"Unknown redis data type stored under {key}: {t}" - ) + raise TypeError(f"Unknown redis data type stored under {key}: {t}") - def get_keys(self, pattern: str = None) -> List[Any]: + def get_keys(self: "redis_db", pattern: str) -> list: """ Return list of Redis keys that match search pattern. """ if not pattern: - raise ValueError( - f"Missing required arguments: pattern={pattern}" - ) + raise ValueError(f"Missing required arguments: pattern={pattern}") return [x.decode("utf-8") for x in self.r.keys(pattern)] - def get_queue_msgs(self, key: str = None) -> List['twitter_msg']: + def get_queue_msgs(self: "redis_db", key: str) -> list["twitter_msg"]: """ Return the list of Tweets stored under key as Twitter messages objects. """ if not key: - raise ValueError( - f"Missing required arguments: key={key}" - ) + raise ValueError(f"Missing required arguments: key={key}") """ Return from list in reverse order, to present items in the same order @@ -154,14 +141,12 @@ def get_queue_msgs(self, key: str = None) -> List['twitter_msg']: return msgs - def get_stats(self, key: str = None) -> Union[None, 'mrt_stats']: + def get_stats(self: "redis_db", key: str) -> Union[None, "mrt_stats"]: """ Return MRT stats from Redis as JSON, and return as an MRT stats object. """ if not key: - raise ValueError( - f"Missing required arguments: key={key}" - ) + raise ValueError(f"Missing required arguments: key={key}") mrt_s = mrt_stats() json_str = self.r.get(key) @@ -171,7 +156,7 @@ def get_stats(self, key: str = None) -> Union[None, 'mrt_stats']: mrt_s.from_json(json_str.decode("utf-8")) return mrt_s - def set_stats(self, key: str = None, mrt_s: 'mrt_stats' = None): + def set_stats(self: "redis_db", key: str, mrt_s: "mrt_stats"): """ Take an MRT stats object, serialise it to JSON, store in Redis. """ @@ -182,14 +167,12 @@ def set_stats(self, key: str = None, mrt_s: 'mrt_stats' = None): self.r.set(key, mrt_s.to_json()) - def set_stats_json(self, key: str = None, json_str: str = None): + def set_stats_json(self: "redis_db", key: str, json_str: str): """ Take JSON serialisation of an MRT stats object, and store in Redis. """ if not key: - raise ValueError( - f"Missing required arguments: key={key}" - ) + raise ValueError(f"Missing required arguments: key={key}") if not json_str: raise ValueError( @@ -198,7 +181,7 @@ def set_stats_json(self, key: str = None, json_str: str = None): self.r.set(key, json_str) - def to_file(self, filename: str = None): + def to_file(self: "redis_db", filename: str): """ Dump the entire redis DB to a JSON file. 
""" @@ -210,13 +193,12 @@ def to_file(self, filename: str = None): with open(filename, "w") as f: f.write(self.to_json()) - def to_json(self) -> str: + def to_json(self: "redis_db") -> str: """ Dump the entire redis DB to JSON """ - d: Dict[str, Any] = {} + d: dict = {} for k in self.r.keys("*"): - t = self.r.type(k).decode("utf-8") if t == "string": val = self.r.get(k) @@ -227,7 +209,9 @@ def to_json(self) -> str: f"Couldn't decode data stored under key {k.decode('utf-8')}" ) elif t == "list": - d[k.decode("utf-8")] = [x.decode("utf-8") for x in self.r.lrange(k, 0, -1)] + d[k.decode("utf-8")] = [ + x.decode("utf-8") for x in self.r.lrange(k, 0, -1) + ] else: raise TypeError( f"Unsupported data type {t} stored under key {k.decode('utf-8')}" diff --git a/dnas/dnas/report.py b/dnas/dnas/report.py index 2503f61..81a82dd 100644 --- a/dnas/dnas/report.py +++ b/dnas/dnas/report.py @@ -1,6 +1,6 @@ import datetime import logging -from typing import List +import typing from dnas.config import config as cfg from dnas.mrt_stats import mrt_stats @@ -14,9 +14,7 @@ class report: """ @staticmethod - def gen_txt_report( - mrt_s: 'mrt_stats' = None, body: bool = True - ) -> List[str]: + def gen_txt_report(mrt_s: "mrt_stats", body: bool = True) -> list[str]: """ Generate a text report using the data in an mrt stats object. If body == False, only generate the headline info for each stat. @@ -132,7 +130,6 @@ def gen_txt_report( if mrt_s.highest_med_prefixes: # Check this isn't an empty mrt_entry if mrt_s.highest_med_prefixes[0].prefix: - text = ( f"Prefixes with the highest MED: " f"{len(mrt_s.highest_med_prefixes)} prefix(es) had a MED " @@ -149,7 +146,9 @@ def gen_txt_report( if peeras not in whois_cache: whois_cache[peeras] = whois.as_lookup(int(peeras)) if whois_cache[peeras]: - text += f"via peer AS{peeras} ({whois_cache[peeras]}) " + text += ( + f"via peer AS{peeras} ({whois_cache[peeras]}) " + ) else: text += f"via peer AS{peeras} " text += f"from origin ASN(s)" @@ -176,7 +175,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "" for mrt_e in mrt_s.longest_as_path: text += f"Prefix {mrt_e.prefix} " @@ -212,7 +210,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "" for mrt_e in mrt_s.longest_comm_set: text += f"Prefix {mrt_e.prefix} " @@ -275,7 +272,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Prefix(es):" for mrt_e in mrt_s.most_advt_prefixes: text += f" {mrt_e.prefix}" @@ -293,7 +289,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Prefix(es):" for mrt_e in mrt_s.most_upd_prefixes: text += f" {mrt_e.prefix}" @@ -311,7 +306,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Prefix(es):" for mrt_e in mrt_s.most_withd_prefixes: text += f" {mrt_e.prefix}" @@ -329,7 +323,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Origin ASN(s):" for mrt_e in mrt_s.most_advt_origin_asn: for asn in mrt_e.origin_asns: @@ -356,7 +349,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Peer ASN(s):" for mrt_e in mrt_s.most_advt_peer_asn: if mrt_e.peer_asn not in whois_cache: @@ -384,7 +376,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Peer ASN(s):" for mrt_e in mrt_s.most_upd_peer_asn: if mrt_e.peer_asn not in whois_cache: @@ -412,7 +403,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "Peer ASN(s):" for mrt_e in mrt_s.most_withd_peer_asn: if mrt_e.peer_asn not in whois_cache: @@ -440,7 +430,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = 
"" for mrt_e in mrt_s.most_origin_asns: text += f"Prefix {mrt_e.prefix} from origin ASN(s)" @@ -469,7 +458,6 @@ def gen_txt_report( txt_report.append(text) if body: - text = "" for mrt_e in mrt_s.most_unknown_attrs: text += f"Prefix {mrt_e.prefix} from origin ASN(s) " @@ -490,7 +478,7 @@ def gen_txt_report( return txt_report @staticmethod - def gen_txt_report_fn_ymd(ymd: str = None) -> str: + def gen_txt_report_fn_ymd(ymd: str) -> str: """ Generate and return the filename for the text file report for a specific date. diff --git a/dnas/dnas/twitter.py b/dnas/dnas/twitter.py index 56a79ac..3ce6fbf 100644 --- a/dnas/dnas/twitter.py +++ b/dnas/dnas/twitter.py @@ -1,7 +1,7 @@ import logging -import tweepy # type: ignore -from typing import List +import typing +import tweepy # type: ignore from dnas.config import config as cfg from dnas.mrt_archive import mrt_archive from dnas.mrt_stats import mrt_stats @@ -16,8 +16,7 @@ class twitter: Class for interacting with Twitter API using Tweepy. """ - def __init__(self) -> None: - + def __init__(self: "twitter") -> None: self.client = tweepy.Client( consumer_key=twitter_auth.consumer_key, consumer_secret=twitter_auth.consumer_secret, @@ -25,7 +24,7 @@ def __init__(self) -> None: access_token_secret=twitter_auth.access_token_secret, ) - def delete(self, tweet_id: int = None): + def delete(self: "twitter", tweet_id: int) -> None: """ Delete a Tweet from twitter.com """ @@ -44,7 +43,7 @@ def delete(self, tweet_id: int = None): raise RuntimeError(f"Error deleting Tweet {tweet_id}: {r}") @staticmethod - def gen_tweets(mrt_s: 'mrt_stats' = None) -> List['twitter_msg']: + def gen_tweets(mrt_s: "mrt_stats") -> list["twitter_msg"]: """ Generate Tweets using the data in an mrt stats object. """ @@ -68,7 +67,7 @@ def gen_tweets(mrt_s: 'mrt_stats' = None) -> List['twitter_msg']: return msg_q - def split_tweet(self, msg: 'twitter_msg' = None) -> List[str]: + def split_tweet(self: "twitter", msg: "twitter_msg") -> list[str]: """ Return a Tweet body split into a list of 280 character strings """ @@ -99,11 +98,11 @@ def split_tweet(self, msg: 'twitter_msg' = None) -> List[str]: return chunks def tweet( - self, + self: "twitter", + msg: "twitter_msg", body: bool = False, - msg: 'twitter_msg' = None, print_only: bool = False, - ): + ) -> None: """ Tweet the header of a twitter message obj. Then tweet the body as a series of paged replies. @@ -119,11 +118,11 @@ def tweet( self.tweet_body(msg, print_only) def tweet_as_reply( - self, - msg: 'twitter_msg' = None, + self: "twitter", + msg: "twitter_msg", print_only: bool = False, tweet_id: int = 0, - ): + ) -> None: """ Tweet a message in reply to an existing Tweet. """ @@ -156,7 +155,9 @@ def tweet_as_reply( ) msg.hdr_id = int(r.data["id"]) - def tweet_hdr(self, msg: 'twitter_msg' = None, print_only: bool = False): + def tweet_hdr( + self: "twitter", msg: "twitter_msg", print_only: bool = False + ) -> None: """ Tweet a message header. """ @@ -186,7 +187,9 @@ def tweet_hdr(self, msg: 'twitter_msg' = None, print_only: bool = False): ) msg.hdr_id = int(r.data["id"]) - def tweet_body(self, msg: 'twitter_msg' = None, print_only: bool = False): + def tweet_body( + self: "twitter", msg: "twitter_msg", print_only: bool = False + ) -> None: """ Tweet a message body as a series of pages replies to the header. 
""" @@ -225,7 +228,7 @@ def tweet_body(self, msg: 'twitter_msg' = None, print_only: bool = False): msg.body_ids.append(r.data["id"]) @staticmethod - def ymd_to_nice(ymd: str = None) -> str: + def ymd_to_nice(ymd: str) -> str: """ Convert a ymd value to a nice format for Twitter. """ diff --git a/dnas/dnas/twitter_msg.py b/dnas/dnas/twitter_msg.py index 5efbd06..892a27e 100644 --- a/dnas/dnas/twitter_msg.py +++ b/dnas/dnas/twitter_msg.py @@ -1,5 +1,6 @@ import json -from typing import List +import typing + class twitter_msg: """ @@ -7,13 +8,13 @@ class twitter_msg: """ def __init__( - self, - hdr: str = "", - hdr_id: int = 0, - body: str = "", - body_ids: List[int] = [], - hidden: bool = True, - ) -> None: + self: "twitter_msg", + hdr: str = "", + hdr_id: int = 0, + body: str = "", + body_ids: list[int] = [], + hidden: bool = True, + ) -> None: """ The header contains the main message to be tweeted. It must be is <= cfg.TWITTER_LEN @@ -42,7 +43,7 @@ def __init__( """ self.hidden = hidden - def from_json(self, json_str: str = None): + def from_json(self: "twitter_msg", json_str: str) -> None: """ Populate this object with data from a JSON string. """ @@ -52,9 +53,7 @@ def from_json(self, json_str: str = None): ) if type(json_str) != str: - raise TypeError( - f"json_str is not a string: {type(json_str)}" - ) + raise TypeError(f"json_str is not a string: {type(json_str)}") json_data = json.loads(json_str) self.hdr = json_data["hdr"] @@ -64,40 +63,32 @@ def from_json(self, json_str: str = None): self.hidden = json_data["hidden"] @staticmethod - def gen_tweeted_q_key(ymd: str = None): + def gen_tweeted_q_key(ymd: str) -> str: """ Return the redis key for the tweeted queue, for a specific day. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}") if type(ymd) != str: - raise TypeError( - f"ymd is not an str: {type(ymd)}" - ) + raise TypeError(f"ymd is not an str: {type(ymd)}") return "TWEETED:" + ymd @staticmethod - def gen_tweet_q_key(ymd: str = None): + def gen_tweet_q_key(ymd: str) -> str: """ Return the redis key for the tweet queue, for a days tweets. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}") if type(ymd) != str: - raise TypeError( - f"ymd is not an str: {type(ymd)}" - ) + raise TypeError(f"ymd is not an str: {type(ymd)}") return "TWEET_Q:" + ymd - def to_json(self) -> str: + def to_json(self: "twitter_msg") -> str: """ Return the twitter message serialised as a json string. 
""" diff --git a/dnas/dnas/whois.py b/dnas/dnas/whois.py index bbb35ac..c7bcf25 100644 --- a/dnas/dnas/whois.py +++ b/dnas/dnas/whois.py @@ -5,19 +5,15 @@ from dnas.bogon_asn import bogon_asn -class whois: +class whois: @staticmethod def as_lookup(asn: int = -1) -> str: if asn < 0: - raise ValueError( - f"Missing required arguments: asn={asn}" - ) + raise ValueError(f"Missing required arguments: asn={asn}") if type(asn) != int: - raise TypeError( - f"asn is not string: {type(asn)}" - ) + raise TypeError(f"asn is not string: {type(asn)}") if bogon_asn.is_bogon(asn): return "" @@ -60,7 +56,7 @@ def as_lookup(asn: int = -1) -> str: for line in output.split("\n"): if "owner:" in line: - tmp = ' '.join(line.split()[1:]) + tmp = " ".join(line.split()[1:]) if tmp.strip(): as_name = tmp break @@ -69,7 +65,7 @@ def as_lookup(asn: int = -1) -> str: for line in output.split("\n"): if "ASName:" in line: - tmp = ' '.join(line.split()[1:]) + tmp = " ".join(line.split()[1:]) if tmp.strip(): as_name = tmp break @@ -78,7 +74,7 @@ def as_lookup(asn: int = -1) -> str: for line in output.split("\n"): if "OrgName:" in line: - tmp = ' '.join(line.split()[1:]) + tmp = " ".join(line.split()[1:]) if tmp.strip(): as_name = tmp break diff --git a/dnas/requirements.txt b/dnas/requirements.txt index 6446de9..397be8f 100644 --- a/dnas/requirements.txt +++ b/dnas/requirements.txt @@ -1,15 +1,20 @@ async-timeout==4.0.2 +black==23.10.0 certifi==2023.7.22 charset-normalizer==2.0.12 Deprecated==1.2.13 idna==3.3 +isort==5.12.0 mrtparse==2.2.0 +mypy==1.6.1 oauthlib==3.2.2 -packaging==21.3 +packaging==23.2 pyparsing==3.0.9 +pytest==7.4.2 redis==4.4.4 requests==2.31.0 requests-oauthlib==1.3.1 +tox==4.11.3 tweepy==4.10.0 urllib3==1.26.17 wrapt==1.14.1 diff --git a/dnas/scripts/get_mrts.py b/dnas/scripts/get_mrts.py index 456c03f..f35ed72 100755 --- a/dnas/scripts/get_mrts.py +++ b/dnas/scripts/get_mrts.py @@ -4,46 +4,41 @@ import datetime import logging import os -import requests import sys import time -from typing import Any, Dict, List +import typing + +import requests # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg from dnas.log import log from dnas.mrt_archives import mrt_archives -from dnas.redis_db import redis_db from dnas.mrt_getter import mrt_getter +from dnas.redis_db import redis_db -def continuous(args: Dict[str, Any] = None): + +def continuous(args: dict) -> None: """ Continuous check for new MRT files and download them from the configured MRT archives. 
""" if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") if type(args) != dict: - raise TypeError( - f"args is not a dict: {type(args)}" - ) + raise TypeError(f"args is not a dict: {type(args)}") mrt_a = mrt_archives() min_interval = cfg.DFT_INTERVAL - while(True): + while True: for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Archive {arch.NAME} is enabled") @@ -67,7 +62,7 @@ def continuous(args: Dict[str, Any] = None): which provides the most frequent dumps: """ if (arch.RIB_INTERVAL * 60) < min_interval: - min_interval = (arch.RIB_INTERVAL * 60) + min_interval = arch.RIB_INTERVAL * 60 logging.debug( f"Get interval set to {min_interval} by " f"{arch.NAME} RIB interval" @@ -84,7 +79,7 @@ def continuous(args: Dict[str, Any] = None): pass if (arch.UPD_INTERVAL * 60) < min_interval: - min_interval = (arch.UPD_INTERVAL * 60) + min_interval = arch.UPD_INTERVAL * 60 logging.debug( f"Get interval set to {min_interval} by " f"{arch.NAME} UPD interval" @@ -92,19 +87,16 @@ def continuous(args: Dict[str, Any] = None): time.sleep(min_interval) -def get_day(args: Dict[str, Any] = None): + +def get_day(args: dict) -> None: """ Download all the MRTs for a specific day. """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") if not args["ymd"]: - raise ValueError( - f"Missing required arguments: ymd={args['ymd']}" - ) + raise ValueError(f"Missing required arguments: ymd={args['ymd']}") url_list = gen_urls_day(args) if not url_list: @@ -112,14 +104,13 @@ def get_day(args: Dict[str, Any] = None): else: get_mrts(replace=args["replace"], url_list=url_list) -def get_latest(args: Dict[str, Any] = None): + +def get_latest(args: dict) -> None: """ Get the latest MRT file from each archive. """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") url_list = gen_urls_latest(args) if not url_list: @@ -127,19 +118,16 @@ def get_latest(args: Dict[str, Any] = None): else: get_mrts(replace=args["replace"], url_list=url_list) -def get_mrts(replace: bool = False, url_list: List[str] = None): + +def get_mrts(url_list: list[str], replace: bool = False) -> None: """ Download the list of MRTs from the passed URL list. """ if not url_list: - raise ValueError( - f"Missing required arguments: url_list={url_list}" - ) + raise ValueError(f"Missing required arguments: url_list={url_list}") if type(url_list) != list: - raise TypeError( - f"url_list is not a list: {type(url_list)}" - ) + raise TypeError(f"url_list is not a list: {type(url_list)}") mrt_a = mrt_archives() logging.info(f"Downloading {len(url_list)} MRT files") @@ -155,11 +143,7 @@ def get_mrts(replace: bool = False, url_list: List[str] = None): outage. For this reason, ignore HTTP erros like 404s. 
""" try: - mrt_getter.download_mrt( - filename=outfile, - replace=replace, - url=url - ) + mrt_getter.download_mrt(filename=outfile, replace=replace, url=url) i += 1 logging.info(f"Downloaded {i}/{len(url_list)}") except requests.exceptions.HTTPError as e: @@ -171,16 +155,15 @@ def get_mrts(replace: bool = False, url_list: List[str] = None): logging.info(f"Finished, downloaded {i}/{len(url_list)}") -def get_range(args: Dict[str, Any] = None): + +def get_range(args: dict) -> None: """ Download all the MRTs for between a specific start and end date inclusive. """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") - if (not args["start"] or not args["end"]): + if not args["start"] or not args["end"]: raise ValueError( f"Missing required options: start={args['start']}, " f"end={args['end']}" @@ -192,68 +175,55 @@ def get_range(args: Dict[str, Any] = None): else: get_mrts(replace=args["replace"], url_list=url_list) -def gen_urls_day(args: Dict[str, Any] = None) -> List[str]: + +def gen_urls_day(args: dict) -> list[str]: """ Return a list of URLs for all the MRTs for a specific day. """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") if not args["ymd"]: - raise ValueError( - f"Missing required arguments: ymd={args['ymd']}" - ) + raise ValueError(f"Missing required arguments: ymd={args['ymd']}") args["start"] = args["ymd"] + ".0000" args["end"] = args["ymd"] + ".2359" return gen_urls_range(args) -def gen_urls_latest(args: Dict[str, Any] = None) -> List[str]: + +def gen_urls_latest(args: dict) -> list[str]: """ Return a list of URLs for the latest MRT file for each archive """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") mrt_a = mrt_archives() url_list = [] for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Checking archive {arch.NAME}...") if args["rib"]: - url_list.append( - arch.gen_rib_url( - arch.gen_latest_rib_fn() - ) - ) + url_list.append(arch.gen_rib_url(arch.gen_latest_rib_fn())) if args["update"]: - url_list.append( - arch.gen_upd_url( - arch.gen_latest_upd_fn() - ) - ) + url_list.append(arch.gen_upd_url(arch.gen_latest_upd_fn())) return url_list -def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: + +def gen_urls_range(args: dict) -> list[str]: """ Generate and return a list of URLs for all MRTs betwen a start and end date inclusive. 
""" if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") - if (not args["start"] or not args["end"]): + if not args["start"] or not args["end"]: raise ValueError( f"Missing required options: start={args['start']}, " f"end={args['end']}" @@ -263,9 +233,7 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: end = datetime.datetime.strptime(args["end"], cfg.TIME_FORMAT) if end < start: - raise ValueError( - f"End date {end} is before start date {start}" - ) + raise ValueError(f"End date {end} is before start date {start}") diff = end - start no_days = int(diff.total_seconds() // 86400) @@ -274,25 +242,23 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: url_list = [] for i in range(0, no_days + 1): - delta = datetime.timedelta(days=i) ymd = datetime.datetime.strftime(start + delta, cfg.DAY_FORMAT) for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Checking archive {arch.NAME}...") if args["rib"]: - all_rib_filenames = arch.gen_rib_fns_day(ymd) for filename in all_rib_filenames[:]: - raw_ts = '.'.join(filename.split(".")[1:3]) + raw_ts = ".".join(filename.split(".")[1:3]) timestamp = datetime.datetime.strptime( raw_ts, cfg.TIME_FORMAT ) - if (timestamp < start or timestamp > end): + if timestamp < start or timestamp > end: all_rib_filenames.remove(filename) if not all_rib_filenames: @@ -317,7 +283,10 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: f"Need to backfill {len(all_rib_filenames)} RIB " f"dumps for archive {arch.NAME} on {ymd}" ) - urls = [arch.gen_rib_url(filename) for filename in all_rib_filenames] + urls = [ + arch.gen_rib_url(filename) + for filename in all_rib_filenames + ] logging.debug(f"Adding {urls}") url_list.extend(urls) else: @@ -335,17 +304,19 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: f"Adding {len(all_rib_filenames)} RIB dumps for " f"archive {arch.NAME} on {ymd}" ) - urls = [arch.gen_rib_url(filename) for filename in all_rib_filenames] + urls = [ + arch.gen_rib_url(filename) + for filename in all_rib_filenames + ] logging.debug(f"Adding {urls}") url_list.extend(urls) if args["update"]: - all_upd_filenames = arch.gen_upd_fns_day(ymd) for filename in all_upd_filenames[:]: timestamp = arch.ts_from_filename(filename) - if (timestamp < start or timestamp > end): + if timestamp < start or timestamp > end: all_upd_filenames.remove(filename) if not all_upd_filenames: @@ -366,7 +337,10 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: f"Need to backfill {len(all_upd_filenames)} UPDATE " f"dumps for archive {arch.NAME} on {ymd}" ) - urls = [arch.gen_upd_url(filename) for filename in all_upd_filenames] + urls = [ + arch.gen_upd_url(filename) + for filename in all_upd_filenames + ] logging.debug(f"Adding {urls}") url_list.extend(urls) else: @@ -380,14 +354,18 @@ def gen_urls_range(args: Dict[str, Any] = None) -> List[str]: f"Adding {len(all_upd_filenames)} UPDATE dumps for " f"archive {arch.NAME} on {ymd}" ) - urls = [arch.gen_upd_url(filename) for filename in all_upd_filenames] + urls = [ + arch.gen_upd_url(filename) + for filename in all_upd_filenames + ] logging.debug(f"Adding {urls}") url_list.extend(urls) rdb.close() return url_list -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. 
""" @@ -496,20 +474,21 @@ def parse_args(): return vars(parser.parse_args()) -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "MRT downloader script", - log_path = cfg.LOG_GETTER, + debug=args["debug"], + log_src="MRT downloader script", + log_path=cfg.LOG_GETTER, ) - if (not args["continuous"] and - not args["latest"] and - not args["range"] and - not args["ymd"] and - not args["yesterday"] + if ( + not args["continuous"] + and not args["latest"] + and not args["range"] + and not args["ymd"] + and not args["yesterday"] ): raise ValueError( "Exactly one of the four modes must be chosen: --continuous, " @@ -538,5 +517,5 @@ def main(): get_day(args) -if __name__ == '__main__': - main() \ No newline at end of file +if __name__ == "__main__": + main() diff --git a/dnas/scripts/git_reports.py b/dnas/scripts/git_reports.py index 999b88f..6855bd5 100755 --- a/dnas/scripts/git_reports.py +++ b/dnas/scripts/git_reports.py @@ -9,20 +9,18 @@ # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg -from dnas.log import log from dnas.git import git +from dnas.log import log from dnas.mrt_stats import mrt_stats from dnas.redis_db import redis_db from dnas.report import report -def check_git(): + +def check_git() -> None: """ Check if the git repo exists locally, if not, clone it. If it already exists, make a pull to ensure it's up to date. @@ -33,19 +31,16 @@ def check_git(): git.clear() git.pull() -def generate(ymd: str = None): + +def generate(ymd: str) -> None: """ Generate the stats files for a specific day. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}." - ) + raise ValueError(f"Missing required arguments: ymd={ymd}.") if type(ymd) != str: - raise TypeError( - f"ymd is not a string: {type(ymd)}" - ) + raise TypeError(f"ymd is not a string: {type(ymd)}") git_dir = git.gen_git_path_ymd(ymd) os.makedirs(git_dir, exist_ok=True) @@ -66,17 +61,18 @@ def generate(ymd: str = None): rdb.close() -def generate_range(end: str = "", start: str = ""): + +def generate_range(end: str, start: str) -> None: """ A wrapper around the generate function to generate report files for a range of days from start to end inclusive. """ - if (not end and not start): + if not end and not start: raise ValueError( f"Missing required arguments: end={end}, start={start}" ) - if (type(end) != str and type(start) != str): + if type(end) != str and type(start) != str: raise TypeError( f"Both end and start must be strings, not: {type(end)} and " f"{type(start)}" @@ -96,14 +92,15 @@ def generate_range(end: str = "", start: str = ""): f"{end_day}" ) - total = (diff.days + 1) + total = diff.days + 1 for i in range(0, total): delta = datetime.timedelta(days=i) ymd = datetime.datetime.strftime(start_day + delta, cfg.DAY_FORMAT) generate(ymd) logging.info(f"Done {i+1}/{total}") -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. """ @@ -173,24 +170,21 @@ def parse_args(): ) return vars(parser.parse_args()) -def publish(ymd: str = None): + +def publish(ymd: str) -> None: """ Commit and push the report files for a specific day to GitHub. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}." 
-        )
+        raise ValueError(f"Missing required arguments: ymd={ymd}.")
 
     if type(ymd) != str:
-        raise TypeError(
-            f"ymd is not a string: {type(ymd)}"
-        )
+        raise TypeError(f"ymd is not a string: {type(ymd)}")
 
     git_dir = git.gen_git_path_ymd(ymd)
     txt_filename = os.path.join(git_dir, report.gen_txt_report_fn_ymd(ymd))
 
-    #git.clear()
+    # git.clear()
     git.add(txt_filename)
     if git.diff():
         git.commit(f"Adding report(s) for {ymd}")
@@ -202,17 +196,18 @@ def publish(ymd: str = None):
     else:
         logging.info(f"No changes to commit to git for {ymd}.")
 
-def publish_range(end: str = "", start: str = ""):
+
+def publish_range(end: str, start: str) -> None:
     """
     A wrapper around the publish function to publish report files for
     a range of days from start to end inclusive.
     """
-    if (not end and not start):
+    if not end and not start:
         raise ValueError(
             f"Missing required arguments: end={end}, start={start}"
         )
 
-    if (type(end) != str and type(start) != str):
+    if type(end) != str and type(start) != str:
         raise TypeError(
             f"Both end and start must be strings, not: {type(end)} and "
             f"{type(start)}"
         )
@@ -239,7 +234,8 @@ def publish_range(end: str = "", start: str = ""):
         publish(ymd)
         logging.info(f"Done {i+1}/{total}")
 
-def yesterday():
+
+def yesterday() -> None:
     """
     A wrapper function to generate and publish the report for yesterday.
     """
@@ -250,13 +246,13 @@ def yesterday():
     generate(yesterday)
     publish(yesterday)
 
-def main():
+def main():
     args = parse_args()
 
     log.setup(
-        debug = args["debug"],
-        log_src = "report generation and posting script",
-        log_path = cfg.LOG_GIT,
+        debug=args["debug"],
+        log_src="report generation and posting script",
+        log_path=cfg.LOG_GIT,
     )
 
     # Ensure the git repo exists, clone if it doesn't:
@@ -268,9 +264,7 @@ def main():
     elif args["range"]:
         generate_range(args["end"], args["start"])
     else:
-        raise ValueError(
-            "--range or --ymd must be used with --generate!"
-        )
+        raise ValueError("--range or --ymd must be used with --generate!")
 
     if args["publish"]:
         if args["ymd"]:
@@ -278,12 +272,11 @@ def main():
         elif args["range"]:
             publish_range(args["end"], args["start"])
         else:
-            raise ValueError(
-                "--range or --ymd must be used with --publish!"
-            )
+            raise ValueError("--range or --ymd must be used with --publish!")
 
     if args["yesterday"]:
         yesterday()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/dnas/scripts/parse_mrts.py b/dnas/scripts/parse_mrts.py
index 32904c1..64bf219 100755
--- a/dnas/scripts/parse_mrts.py
+++ b/dnas/scripts/parse_mrts.py
@@ -4,32 +4,29 @@
 import datetime
 import glob
 import logging
-import mrtparse  # type: ignore
 import multiprocessing
-from multiprocessing import Pool
 import os
 import sys
 import time
-from typing import Any, Dict, List
+import typing
+from multiprocessing import Pool
 
 # Accommodate the use of the dnas library, even when the library isn't installed
 sys.path.append(
-    os.path.join(
-        os.path.dirname(os.path.realpath(__file__))
-        , "../"
-    )
+    os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")
 )
 from dnas.config import config as cfg
 from dnas.log import log
-from dnas.mrt_archives import mrt_archives
 from dnas.mrt_archive import mrt_archive
-from dnas.mrt_stats import mrt_stats
+from dnas.mrt_archives import mrt_archives
 from dnas.mrt_parser import mrt_parser
-from dnas.mrt_splitter import mrt_splitter, MrtFormatError
+from dnas.mrt_splitter import MrtFormatError, mrt_splitter
+from dnas.mrt_stats import mrt_stats
 from dnas.redis_db import redis_db
 
-def continuous(args: Dict[str, Any] = None):
+
+def continuous(args: dict) -> None:
     """
     Continuously parse new MRT files as they are downloaded from the
     configured MRT archives. This function simply globs for all MRTs that
     match today's "ymd"
     previous day up until $delta past midnight each day.
     """
     if not args:
-        raise ValueError(
-            f"Missing required arguments: args={args}"
-        )
+        raise ValueError(f"Missing required arguments: args={args}")
 
     if type(args) != dict:
-        raise TypeError(
-            f"args is not a dict: {type(args)}"
-        )
+        raise TypeError(f"args is not a dict: {type(args)}")
 
     mrt_a = mrt_archives()
     min_interval = cfg.DFT_INTERVAL
 
-    while(True):
-
-        delta = datetime.timedelta(minutes = 90)
+    while True:
+        delta = datetime.timedelta(minutes=90)
         glob_ymd = datetime.datetime.strftime(
             datetime.datetime.utcnow() - delta, cfg.DAY_FORMAT
         )
@@ -61,13 +53,16 @@ def continuous(args: Dict[str, Any] = None):
         filelist = []
 
         for arch in mrt_a.archives:
-            if (args["enabled"] and not arch.ENABLED):
+            if args["enabled"] and not arch.ENABLED:
                 continue
             logging.debug(f"Archive {arch.NAME} is enabled")
 
             if args["rib"]:
-                glob_str = str(arch.MRT_DIR + "/").replace("///", "/").replace("//", "/")
+                glob_str = (
+                    str(arch.MRT_DIR + "/")
+                    .replace("///", "/")
+                    .replace("//", "/")
+                )
                 glob_str += arch.RIB_PREFIX + "*" + glob_ymd + "*"
                 filelist.extend(glob.glob(glob_str))
 
@@ -76,19 +71,23 @@ def continuous(args: Dict[str, Any] = None):
             which provides the most frequent dumps:
             """
             if (arch.RIB_INTERVAL * 60) < min_interval:
-                min_interval = (arch.RIB_INTERVAL * 60)
+                min_interval = arch.RIB_INTERVAL * 60
                 logging.debug(
                     f"Parse interval set to {min_interval} by "
                     f"{arch.NAME} RIB interval"
                 )
 
             if args["update"]:
-                glob_str = str(arch.MRT_DIR + "/").replace("///", "/").replace("//", "/")
+                glob_str = (
+                    str(arch.MRT_DIR + "/")
+                    .replace("///", "/")
+                    .replace("//", "/")
+                )
                 glob_str += arch.UPD_PREFIX + "*" + glob_ymd + "*"
                 filelist.extend(glob.glob(glob_str))
 
                 if (arch.UPD_INTERVAL * 60) < min_interval:
-                    min_interval = (arch.UPD_INTERVAL * 60)
+                    min_interval = arch.UPD_INTERVAL * 60
                     logging.debug(
                         f"Parse interval set to {min_interval} by "
                         f"{arch.NAME} UPD interval"
@@ -100,7 +99,8 @@ def continuous(args: Dict[str, Any] = None):
 
         time.sleep(min_interval)
 
-def parse_args():
+
+def parse_args() -> dict:
     """
     Parse the CLI args to this script.
     """
@@ -219,22 +219,18 @@ def parse_args():
 
     return vars(parser.parse_args())
 
+
 def parse_file(
-    filename: str = None, keep_chunks: bool = False,
-    multi: bool = True
-    ) -> 'mrt_stats':
+    filename: str, keep_chunks: bool = False, multi: bool = True
+) -> "mrt_stats":
     """
     Split and parse an individual MRT file, return the mrt_stats.
     """
     if not filename:
-        raise ValueError(
-            f"Missing required arguments: filename={filename}."
-        )
+        raise ValueError(f"Missing required arguments: filename={filename}.")
 
     if type(filename) != str:
-        raise TypeError(
-            f"filename is not a string: {type(filename)}"
-        )
+        raise TypeError(f"filename is not a string: {type(filename)}")
 
     mrt_a = mrt_archives()
     logging.info(f"Processing {filename}...")
@@ -256,13 +252,12 @@ def parse_file(
         return mrt_stats()
 
     if multi:
-        no_cpu = multiprocessing.cpu_count()
+        no_cpu = multiprocessing.cpu_count()
         Pool = multiprocessing.Pool(no_cpu)
 
         splitter = mrt_splitter(filename)
         num_entries, file_chunks = splitter.split(
-            no_chunks=no_cpu,
-            outdir=cfg.SPLIT_DIR
+            no_chunks=no_cpu, outdir=cfg.SPLIT_DIR
         )
         try:
             splitter.close()
@@ -299,7 +294,8 @@ def parse_file(
 
     return mrt_s
 
-def parse_files(filelist: List[str] = None, args: Dict[str, Any] = None):
+
+def parse_files(filelist: list[str], args: dict) -> None:
     """
     A wrapper around the single file parsing function parse_file(),
     which accepts a list of files to parse.
@@ -310,9 +306,7 @@ def parse_files(filelist: List[str] = None, args: Dict[str, Any] = None):
         )
 
     if type(filelist) != list:
-        raise TypeError(
-            f"filelist is not a list: {type(filelist)}"
-        )
+        raise TypeError(f"filelist is not a list: {type(filelist)}")
 
     rdb = redis_db()
     mrt_a = mrt_archives()
@@ -342,16 +336,19 @@ def parse_files(filelist: List[str] = None, args: Dict[str, Any] = None):
             )
             continue
         except EOFError as e:
-            logging.error(
-                f"Unable to split {file}, unexpected EOF: {e}"
-            )
+            logging.error(f"Unable to split {file}, unexpected EOF: {e}")
             os.remove(file)
             logging.error(f"Deleted {file}")
             continue
 
         if day_stats:
             if day_stats.add(mrt_s):
-                day_stats.add_archive(arch.NAME)
+                if arch:
+                    day_stats.add_archive(arch.NAME)
+                else:
+                    logging.warning(
+                        "Unable to add archive name to stats object"
+                    )
                 logging.info(f"Added {file} to {day_key}")
             elif file not in day_stats.file_list:
                 logging.info(f"Added {file} to {day_key} file list")
@@ -359,7 +356,10 @@ def parse_files(filelist: List[str] = None, args: Dict[str, Any] = None):
 
             rdb.set_stats(day_key, day_stats)
         else:
-            mrt_s.add_archive(arch.NAME)
+            if arch:
+                mrt_s.add_archive(arch.NAME)
+            else:
+                logging.warning("Unable to add archive name to stats object")
             rdb.set_stats(day_key, mrt_s)
             logging.info(f"Created new entry {day_key} from {file}")
@@ -371,27 +371,24 @@ def parse_files(filelist: List[str] = None, args: Dict[str, Any] = None):
 
     rdb.close()
 
-def process_day(args: Dict[str, Any] = None):
+
+def process_day(args: dict) -> None:
     """
     Build the list of files to be parsed and pass them to the parser function.
     This function builds a list of MRT files from a specific day, from
     eligible MRT archives.
""" - if (not args): - raise ValueError( - f"Missing required arguments: args={args}" - ) + if not args: + raise ValueError(f"Missing required arguments: args={args}") - if (not args["ymd"]): - raise ValueError( - f"Missing required arguments: ymd={args['ymd']}" - ) + if not args["ymd"]: + raise ValueError(f"Missing required arguments: ymd={args['ymd']}") mrt_archive.valid_ymd(args["ymd"]) mrt_a = mrt_archives() filelist = [] for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Checking archive {arch.NAME}...") @@ -409,21 +406,18 @@ def process_day(args: Dict[str, Any] = None): parse_files(filelist=filelist, args=args) -def process_mrt_file(args: Dict[str, Any] = None): + +def process_mrt_file(args: dict) -> None: """ Pass a single filename to the parser function. """ if not args: - raise ValueError( - f"Missing required arguments: args={args}" - ) + raise ValueError(f"Missing required arguments: args={args}") filename = args["single"] if type(filename) != str: - raise TypeError( - f"filename is not a string: {type(filename)}" - ) + raise TypeError(f"filename is not a string: {type(filename)}") mrt_a = mrt_archives() arch = mrt_a.arch_from_file_path(filename) @@ -433,26 +427,23 @@ def process_mrt_file(args: Dict[str, Any] = None): else: exit(1) -def process_mrt_glob(args: Dict[str, Any] = None): + +def process_mrt_glob(args: dict) -> None: """ Build the list of files to be parsed based on a file glob, then pass them to the parser function. This function builds a list of all available MRT files from all eligble MRT archives. """ - if (not args): - raise ValueError( - f"Missing required arguments: args={args}" - ) + if not args: + raise ValueError(f"Missing required arguments: args={args}") if type(args) != dict: - raise TypeError( - f"args is not a dict: {type(args)}" - ) + raise TypeError(f"args is not a dict: {type(args)}") mrt_a = mrt_archives() filelist = [] for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Checking archive {arch.NAME}...") @@ -474,17 +465,16 @@ def process_mrt_glob(args: Dict[str, Any] = None): parse_files(filelist=filelist, args=args) -def process_range(args: Dict[str, Any] = None): + +def process_range(args: dict) -> None: """ Build a list of MRT files between the --start and --end dates inclusive to pass to the MRT parser function. 
""" - if (not args): - raise ValueError( - f"Missing required arguments: args={args}" - ) + if not args: + raise ValueError(f"Missing required arguments: args={args}") - if (not args["start"] and not args["end"]): + if not args["start"] and not args["end"]: raise ValueError( "Both --start and --end must be specified when using --range" ) @@ -504,22 +494,20 @@ def process_range(args: Dict[str, Any] = None): filelist = [] for i in range(0, diff.days + 1): - delta = datetime.timedelta(days=i) ymd = datetime.datetime.strftime(start_time + delta, cfg.DAY_FORMAT) for arch in mrt_a.archives: - if (args["enabled"] and not arch.ENABLED): + if args["enabled"] and not arch.ENABLED: continue logging.debug(f"Checking archive {arch.NAME} on {ymd}...") if args["rib"]: - rib_filenames = arch.gen_rib_fns_day(ymd) for filename in rib_filenames[:]: timestamp = arch.ts_from_filename(filename) - if (timestamp < start_time or timestamp > end_time): + if timestamp < start_time or timestamp > end_time: rib_filenames.remove(filename) if not rib_filenames: @@ -531,15 +519,16 @@ def process_range(args: Dict[str, Any] = None): ) logging.debug(f"Adding {rib_filenames}") for file in rib_filenames: - filelist.append(os.path.normpath(arch.MRT_DIR + "/" + file)) + filelist.append( + os.path.normpath(arch.MRT_DIR + "/" + file) + ) if args["update"]: - upd_filenames = arch.gen_upd_fns_day(ymd) for filename in upd_filenames[:]: timestamp = arch.ts_from_filename(filename) - if (timestamp < start_time or timestamp > end_time): + if timestamp < start_time or timestamp > end_time: upd_filenames.remove(filename) if not upd_filenames: @@ -551,7 +540,9 @@ def process_range(args: Dict[str, Any] = None): ) logging.debug(f"Adding {upd_filenames}") for file in upd_filenames: - filelist.append(os.path.normpath(arch.MRT_DIR + "/" + file)) + filelist.append( + os.path.normpath(arch.MRT_DIR + "/" + file) + ) if not filelist: logging.info(f"No files found to process") @@ -559,17 +550,21 @@ def process_range(args: Dict[str, Any] = None): parse_files(filelist=filelist, args=args) -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "MRT parser script", - log_path = cfg.LOG_PARSER, + debug=args["debug"], + log_src="MRT parser script", + log_path=cfg.LOG_PARSER, ) - if (not args["continuous"] and not args["range"] and not args["single"] and - not args["yesterday"] and not args["ymd"] + if ( + not args["continuous"] + and not args["range"] + and not args["single"] + and not args["yesterday"] + and not args["ymd"] ): raise ValueError( "At least one of --continuous, --range, --single, --yesterday, or " @@ -602,5 +597,6 @@ def main(): else: process_mrt_glob(args) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/dnas/scripts/redis_mgmt.py b/dnas/scripts/redis_mgmt.py index ecaa8fc..e5cc3cc 100755 --- a/dnas/scripts/redis_mgmt.py +++ b/dnas/scripts/redis_mgmt.py @@ -2,64 +2,59 @@ import argparse import logging -import pprint import os +import pprint import sys -from typing import List +import typing # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg from dnas.log import log -from dnas.redis_db import redis_db from dnas.mrt_stats import mrt_stats +from dnas.redis_db import redis_db rdb = redis_db() -def delete(key: str = None): + +def delete(key: str) -> 
None:
     """
     Delete the key/value pair from redis stored under key.
     """
     if not key:
-        raise ValueError(
-            f"Missing required arguments: key={key}"
-        )
+        raise ValueError(f"Missing required arguments: key={key}")

     if rdb.delete(key):
         logging.info(f"Deleted {key}")
     else:
         logging.info(f"Nothing to delete for {key}")

-def dump_json(filename: str = None):
+
+def dump_json(filename: str) -> None:
     """
     Dump the entire redis DB to a JSON file.
     """
     if not filename:
-        raise ValueError(
-            f"Missing required arguments: filename={filename}"
-        )
+        raise ValueError(f"Missing required arguments: filename={filename}")

     rdb.to_file(filename)
     logging.info(f"Written DB dump to {filename}")

-def load_json(filename: str = None):
+
+def load_json(filename: str) -> None:
     """
     Import a JSON dump into redis.
     """
     if not filename:
-        raise ValueError(
-            f"Missing required arguments: filename={filename}"
-        )
+        raise ValueError(f"Missing required arguments: filename={filename}")

     rdb.from_file(filename)

-def parse_args():
+
+def parse_args() -> dict:
     """
     Parse the CLI args to this script.
     """
@@ -166,42 +161,40 @@ def parse_args():
     return vars(parser.parse_args())

-def pprint_key(key: str = None):
+
+def pprint_key(key: str) -> None:
     """
     Print the value stored in redis at the given key.
     """
     if not key:
-        raise ValueError(
-            f"Missing required arguments: key={key}"
-        )
+        raise ValueError(f"Missing required arguments: key={key}")

     pp = pprint.PrettyPrinter(indent=2)
     pp.pprint(rdb.get(key))

-def print_key(key: str = None):
+
+def print_key(key: str) -> None:
     """
     Print the value stored in redis at the given key.
     """
     if not key:
-        raise ValueError(
-            f"Missing required arguments: key={key}"
-        )
+        raise ValueError(f"Missing required arguments: key={key}")

     print(rdb.get(key))

-def print_keys():
+
+def print_keys() -> None:
     """
     Print all the keys in the redis DB.
     """
     print(rdb.get_keys("*"))

-def print_stats(key: str = None):
+
+def print_stats(key: str) -> None:
     """
     Print an mrt stats object stored in redis, based on the passed key.
     """
     if not key:
-        raise ValueError(
-            f"Missing required arguments: key={key}"
-        )
+        raise ValueError(f"Missing required arguments: key={key}")

     mrt_s = rdb.get_stats(key)
     if mrt_s:
@@ -209,14 +202,13 @@ def print_stats(key: str = None):
     else:
         print(f"No stats stored in redis under key {key}")

-def print_stats_daily(ymd: str = None):
+
+def print_stats_daily(ymd: str) -> None:
     """
     Print the mrt stats object from a specific day stored in redis.
     """
     if not ymd:
-        raise ValueError(
-            f"Missing required arguments: ymd={ymd}"
-        )
+        raise ValueError(f"Missing required arguments: ymd={ymd}")

     mrt_s = rdb.get_stats(mrt_stats.gen_daily_key(ymd))
     if mrt_s:
@@ -224,20 +216,17 @@ def print_stats_daily(ymd: str = None):
     else:
         print(f"No stats stored in redis for day {ymd}")

-def print_stats_diff(keys: List[str] = None):
+
+def print_stats_diff(keys: list[str]) -> None:
     """
     Print the diff of two mrt stats objects stored in redis at the two
     passed keys.
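# --- Editor's note (annotation, not part of the patch): "diff" here is the
# comparison that mrt_stats implements internally. As a generic illustration
# only, a diff of two flat stats dicts reduces to the keys whose values
# disagree; this helper is hypothetical and not part of the dnas library.
def stats_diff(a: dict, b: dict) -> dict:
    # keys present in either dict whose values differ between the two
    return {
        k: (a.get(k), b.get(k))
        for k in a.keys() | b.keys()
        if a.get(k) != b.get(k)
    }

assert stats_diff({"bogon_asns": 3, "upds": 9}, {"bogon_asns": 5, "upds": 9}) == {
    "bogon_asns": (3, 5)
}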
""" - if (not keys): - raise ValueError( - f"Missing required arguments: keys={keys}" - ) + if not keys: + raise ValueError(f"Missing required arguments: keys={keys}") if len(keys) != 2: - raise ValueError( - f"Exactly two keys must be provided: keys={keys}" - ) + raise ValueError(f"Exactly two keys must be provided: keys={keys}") mrt_s_1 = rdb.get_stats(keys[0]) mrt_s_2 = rdb.get_stats(keys[1]) @@ -254,7 +243,8 @@ def print_stats_diff(keys: List[str] = None): else: print(f"Stats objects are equal") -def print_stats_global(): + +def print_stats_global() -> None: """ Print the global stats object stored in redis. """ @@ -264,7 +254,8 @@ def print_stats_global(): else: print(f"No global stats stored in redis") -def wipe(): + +def wipe() -> None: """ Wipe the entire redis DB. """ @@ -272,13 +263,13 @@ def wipe(): rdb.delete(k) logging.info(f"Database wiped") -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "Redis management script", - log_path = cfg.LOG_REDIS, + debug=args["debug"], + log_src="Redis management script", + log_path=cfg.LOG_REDIS, ) if args["dump"]: @@ -314,5 +305,6 @@ def main(): rdb.close() -if __name__ == '__main__': - main() \ No newline at end of file + +if __name__ == "__main__": + main() diff --git a/dnas/scripts/split_mrt.py b/dnas/scripts/split_mrt.py index f23b2a6..8f474cd 100755 --- a/dnas/scripts/split_mrt.py +++ b/dnas/scripts/split_mrt.py @@ -4,20 +4,19 @@ import logging import os import sys +import typing # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg from dnas.log import log from dnas.mrt_splitter import mrt_splitter -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. """ @@ -49,7 +48,8 @@ def parse_args(): ) return vars(parser.parse_args()) -def split(filename: str = None, num_chunks: int = None): + +def split(filename: str, num_chunks: int) -> None: """ Split an MRT file into N equal sized files ("chunks"). 
""" @@ -61,24 +61,28 @@ def split(filename: str = None, num_chunks: int = None): splitter = mrt_splitter(filename) try: - num_entires, chunk_names = splitter.split(num_chunks) + num_entires, chunk_names = splitter.split( + no_chunks=num_chunks, outdir=os.path.dirname(splitter.filename) + ) except EOFError as e: logging.error(f"Unable to split {filename}, unexpeted EOF") raise - logging.info(f"Split {num_entires} MRT entries into {len(chunk_names)} files:") + logging.info( + f"Split {num_entires} MRT entries into {len(chunk_names)} files:" + ) logging.info(chunk_names) -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "MRT splitter script", - log_path = cfg.LOG_SPLITTER, + debug=args["debug"], + log_src="MRT splitter script", + log_path=cfg.LOG_SPLITTER, ) split(args["filename"], int(args["chunks"])) -if __name__ == '__main__': - main() \ No newline at end of file +if __name__ == "__main__": + main() diff --git a/dnas/scripts/stats.py b/dnas/scripts/stats.py index af95e7b..2050ae1 100755 --- a/dnas/scripts/stats.py +++ b/dnas/scripts/stats.py @@ -5,40 +5,36 @@ import logging import os import sys -from typing import Any, Dict +import typing # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg from dnas.log import log -from dnas.redis_db import redis_db -from dnas.mrt_archives import mrt_archives from dnas.mrt_archive import mrt_archive -from dnas.mrt_stats import mrt_stats +from dnas.mrt_archives import mrt_archives from dnas.mrt_entry import mrt_entry +from dnas.mrt_stats import mrt_stats +from dnas.redis_db import redis_db + def gen_day_stats( - enabled: bool = False, - rib: bool = False, - update: bool = False, - ymd: str = None - ): + ymd: str, + enabled: bool = False, + rib: bool = False, + update: bool = False, +) -> None: """ Generate the global stats for a specific day, by merging the stats obj from each MRT archive of that day. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}, use --ymd" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}, use --ymd") - if (not rib and not update): + if not rib and not update: raise ValueError( "At least one of --rib and/or --update must be used with " "--daily" @@ -51,7 +47,7 @@ def gen_day_stats( day_keys = [] for arch in mrt_a.archives: - if (enabled and not arch.ENABLED): + if enabled and not arch.ENABLED: continue logging.debug(f"Archive {arch.NAME} is enabled") @@ -113,21 +109,18 @@ def gen_day_stats( f"{day_key}" ) else: - logging.info( - f"No update to exsiting {ymd} stats under {day_key}" - ) + logging.info(f"No update to exsiting {ymd} stats under {day_key}") rdb.close() -def gen_diff(ymd: str = None): + +def gen_diff(ymd: str) -> None: """ Generate and store the diff of a daily stats object, with the daily stats from the day before. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}, use --ymd" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}, use --ymd") rdb = redis_db() @@ -177,21 +170,20 @@ def gen_diff(ymd: str = None): rdb.close() -def gen_range(args: Dict[str, Any] = None): + +def gen_range(args: dict) -> None: """ Generate global stats for a range of days from --start to --end inclusive. 
""" - if (not args): - raise ValueError( - f"Missing required arguments: args={args}" - ) + if not args: + raise ValueError(f"Missing required arguments: args={args}") - if (not args["start"] and not args["end"]): + if not args["start"] and not args["end"]: raise ValueError( "Both --start and --end must be specified when using --range" ) - if (type(args["end"]) != str and type(args["start"]) != str): + if type(args["end"]) != str and type(args["start"]) != str: raise TypeError( f"Both end and start must be strings, not: {type(args['end'])} " f"and {type(args['start'])}" @@ -216,15 +208,14 @@ def gen_range(args: Dict[str, Any] = None): ymd = datetime.datetime.strftime(start_day + delta, cfg.DAY_FORMAT) if args["daily"]: - gen_day_stats( - rib = args["rib"], update = args["update"], ymd = ymd - ) + gen_day_stats(rib=args["rib"], update=args["update"], ymd=ymd) if args["diff"]: gen_diff(ymd) if args["global"]: upd_global_with_day(ymd) -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. """ @@ -324,15 +315,14 @@ def parse_args(): ) return vars(parser.parse_args()) -def upd_global_with_day(ymd: str = None): + +def upd_global_with_day(ymd: str) -> None: """ Update the running global stats object with the global stats from a specific day. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}, use --ymd" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}, use --ymd") rdb = redis_db() global_key = mrt_stats.gen_global_key() @@ -343,8 +333,7 @@ def upd_global_with_day(ymd: str = None): if not day_stats: logging.info( - f"No existing day stats for {ymd} in redis. Nothing to " - "update" + f"No existing day stats for {ymd} in redis. Nothing to " "update" ) return @@ -354,14 +343,12 @@ def upd_global_with_day(ymd: str = None): f"stats for {ymd}" ) rdb.set_stats(global_key, day_stats) - + # Else there are global stats and day stats to merge else: if global_stats.merge(day_stats): global_stats.merge_archives(day_stats) - logging.info( - f"Global stats merged with day stats from {ymd}" - ) + logging.info(f"Global stats merged with day stats from {ymd}") rdb.set_stats(global_key, global_stats) else: logging.info( @@ -370,17 +357,20 @@ def upd_global_with_day(ymd: str = None): rdb.close() -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "global stats compiler script", - log_path = cfg.LOG_STATS, + debug=args["debug"], + log_src="global stats compiler script", + log_path=cfg.LOG_STATS, ) - if (not args["daily"] and not args["diff"] and not args["global"] and - not args["yesterday"] + if ( + not args["daily"] + and not args["diff"] + and not args["global"] + and not args["yesterday"] ): raise ValueError( "At least one of --daily, --diff, --global or --yesterday must be " @@ -389,10 +379,10 @@ def main(): if args["daily"] and not args["range"]: gen_day_stats( - enabled = args["enabled"], - rib = args["rib"], - update = args["update"], - ymd = args["ymd"] + enabled=args["enabled"], + rib=args["rib"], + update=args["update"], + ymd=args["ymd"], ) if args["diff"] and not args["range"]: @@ -410,11 +400,12 @@ def main(): datetime.datetime.now() - delta, cfg.DAY_FORMAT ) gen_day_stats( - enabled = args["enabled"], - rib = args["rib"], - update = args["update"], - ymd = yesterday + enabled=args["enabled"], + rib=args["rib"], + update=args["update"], + ymd=yesterday, ) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/dnas/scripts/test_mrt.py b/dnas/scripts/test_mrt.py index 
76907ba..39b265e 100755
--- a/dnas/scripts/test_mrt.py
+++ b/dnas/scripts/test_mrt.py
@@ -4,16 +4,15 @@
 import errno
 import json
 import logging
-import mrtparse  # type: ignore
 import os
 import sys
+import typing
+
+import mrtparse  # type: ignore

 # Accommodate the use of the dnas library, even when the library isn't installed
 sys.path.append(
-    os.path.join(
-        os.path.dirname(os.path.realpath(__file__))
-        , "../"
-    )
+    os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")
 )

 from dnas.config import config as cfg
@@ -21,7 +20,8 @@
 from dnas.mrt_parser import mrt_parser
 from dnas.mrt_stats import mrt_stats

-def check_rib_dump(filename: str = None):
+
+def check_rib_dump(filename: str) -> None:
     """
     Perform some basic checks to determine if this is a valid MRT RIB dump.
     """
@@ -36,7 +36,7 @@ def check_rib_dump(filename: str = None):
     mrt_entries = mrtparse.Reader(filename)
     for idx, mrt_e in enumerate(mrt_entries):
         rib_type = list(mrt_e.data["type"].keys())[0]
-        if (rib_type != mrtparse.MRT_T['TABLE_DUMP_V2']):
+        if rib_type != mrtparse.MRT_T["TABLE_DUMP_V2"]:
             logging.error(
                 f"Entry {idx} in {filename} is not type TABLE_DUMP_V2: "
                 f"{mrt_e.data['type']}"
             )
@@ -56,7 +56,8 @@ def check_rib_dump(filename: str = None):
     logging.info(f"{filename} appears to be a valid RIB dump MRT file.")

-def check_update_dump(filename: str = None):
+
+def check_update_dump(filename: str) -> None:
     """
     Perform some basic checks to determine if this is a valid MRT UPDATE
     dump.
@@ -72,8 +73,10 @@ def check_update_dump(filename: str = None):
     mrt_entries = mrtparse.Reader(filename)
     for idx, mrt_e in enumerate(mrt_entries):
         upd_type = list(mrt_e.data["type"].keys())[0]
-        if (upd_type != mrtparse.MRT_T['BGP4MP_ET'] and
-            upd_type != mrtparse.MRT_T['BGP4MP']):
+        if (
+            upd_type != mrtparse.MRT_T["BGP4MP_ET"]
+            and upd_type != mrtparse.MRT_T["BGP4MP"]
+        ):
             logging.error(
                 f"Entry {idx} in {filename} is not type BGP4MP_ET: "
                 f"{mrt_e.data['type']}"
             )
@@ -93,7 +96,8 @@ def check_update_dump(filename: str = None):
     logging.info(f"{filename} appears to be a valid UPDATE dump MRT file.")

-def get_stats(filename: str = None):
+
+def get_stats(filename: str) -> None:
     """
     Print some basic statistics about the entries in an MRT file.
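# --- Editor's note (annotation, not part of the patch): the tallying pattern
# get_stats() applies below, condensed. Only mrtparse calls already used in
# this file (mrtparse.Reader, entry.data["type"]) appear; the filename is a
# placeholder.
import mrtparse  # type: ignore

def count_types(filename: str) -> dict:
    e_types: dict = {}
    for mrt_e in mrtparse.Reader(filename):
        e_type = list(mrt_e.data["type"].keys())[0]  # one key: the type code
        e_types[e_type] = e_types.get(e_type, 0) + 1
    return e_types

# count_types("updates.20220101.0000.gz") -> a mapping of MRT type code to count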
""" @@ -115,7 +119,6 @@ def get_stats(filename: str = None): mrt_entries = mrtparse.Reader(filename) for idx, mrt_e in enumerate(mrt_entries): - e_type = list(mrt_e.data["type"].keys())[0] if e_type not in e_types: e_types[e_type] = 1 @@ -131,7 +134,7 @@ def get_stats(filename: str = None): """ Some MRTs contain the BGP state change events """ - if (e_subtype != 1 and e_subtype != 4): + if e_subtype != 1 and e_subtype != 4: continue """ @@ -149,19 +152,20 @@ def get_stats(filename: str = None): else: e_msgtypes[e_msgtype] += 1 - if e_msgtype != 2: # UPDATE + if e_msgtype != 2: # UPDATE continue if len(mrt_e.data["bgp_message"]["nlri"]) > 0: for nlri in mrt_e.data["bgp_message"]["nlri"]: - prefixes.add( - nlri["prefix"] + "/" + str(nlri["length"]) - ) + prefixes.add(nlri["prefix"] + "/" + str(nlri["length"])) - if withdrawn_routes := mrt_e.data["bgp_message"].get("withdrawn_routes"): + if withdrawn_routes := mrt_e.data["bgp_message"].get( + "withdrawn_routes" + ): for withdrawn_route in withdrawn_routes: prefixes.add( - withdrawn_route["prefix"] + "/" + withdrawn_route["prefix"] + + "/" + str(withdrawn_route["length"]) ) @@ -174,48 +178,52 @@ def get_stats(filename: str = None): else: attrs[attr_t] += 1 - if attr_t == 2: # AS_PATH + if attr_t == 2: # AS_PATH if attr["value"][0]["value"] not in as_paths: as_paths.append(attr["value"][0]["value"]) origin_asns.add(attr["value"][0]["value"][-1]) - if attr_t == 14: # MP_REACH_NLRI -> IPV6_UNICAST + if attr_t == 14: # MP_REACH_NLRI -> IPV6_UNICAST for nlri in attr["value"]["nlri"]: prefixes.add( nlri["prefix"] + "/" + str(nlri["length"]) ) - elif attr_t == 15: # MP_UNREACH_NLRI -> IPV6_UNICAST - if withdrawn_routes := attr["value"].get("withdrawn_routes"): + elif attr_t == 15: # MP_UNREACH_NLRI -> IPV6_UNICAST + if withdrawn_routes := attr["value"].get( + "withdrawn_routes" + ): for withdrawn_route in withdrawn_routes: prefixes.add( - withdrawn_route["prefix"] + "/" + withdrawn_route["prefix"] + + "/" + str(withdrawn_route["length"]) ) logging.info(f"File {filename} contains {idx+1} entries") logging.info("Count per record type:") - for k,v in e_types.items(): + for k, v in e_types.items(): logging.info(f"{mrtparse.MRT_T[k]}: {v}") logging.info("Count per record sub-type:") - for k,v in e_subtypes.items(): + for k, v in e_subtypes.items(): logging.info(f"{mrtparse.BGP4MP_ST[k]}: {v}") logging.info("Count per BGP message type:") - for k,v in e_msgtypes.items(): + for k, v in e_msgtypes.items(): logging.info(f"{mrtparse.BGP_MSG_T[k]}: {v}") logging.info(f"Count per BGP attribute type:") - for k,v in attrs.items(): + for k, v in attrs.items(): logging.info(f"Attr {mrtparse.BGP_ATTR_T[k]}: {v}") logging.info(f"Unique AS paths: {len(as_paths)}") logging.info(f"Unique prefixes: {len(prefixes)}") logging.info(f"Unique origin ASNs: {len(origin_asns)}") -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. """ @@ -286,7 +294,8 @@ def parse_args(): return vars(parser.parse_args()) -def to_json(json_file: str = None, mrt_file: str = None): + +def to_json(json_file: str, mrt_file: str) -> None: """ Convert an MRT file to a JSON string and write to a file. 
""" @@ -304,7 +313,8 @@ def to_json(json_file: str = None, mrt_file: str = None): f.write(json.dumps(mrt_data, indent=2)) logging.info(f"Wrote JSON dump to {json_file}") -def to_json_parsed(rib: bool = False, json_file: str = None, mrt_file: str = None): + +def to_json_parsed(json_file: str, mrt_file: str, rib: bool = False) -> None: """ Parse an MRT file to generate stats, then write the stats as a JSON string to a file. @@ -318,7 +328,7 @@ def to_json_parsed(rib: bool = False, json_file: str = None, mrt_file: str = Non ######## TODO - Fix this hack so that mrt_parser doesn't care about the path cfg.SPLIT_DIR = "" - stats: 'mrt_stats' + stats: "mrt_stats" if rib: stats = mrt_parser.parse_rib_dump(mrt_file) else: @@ -328,13 +338,13 @@ def to_json_parsed(rib: bool = False, json_file: str = None, mrt_file: str = Non stats.to_file(json_file) logging.info(f"Wrote parsed JSON to {json_file}") -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "MRT tester script", - log_path = cfg.LOG_TESTER, + debug=args["debug"], + log_src="MRT tester script", + log_path=cfg.LOG_TESTER, ) if not os.path.isfile(args["mrt"]): @@ -358,17 +368,18 @@ def main(): if args["to_json_rib"]: to_json_parsed( - rib = True, - json_file = args["to_json_rib"], - mrt_file = args["mrt"], + rib=True, + json_file=args["to_json_rib"], + mrt_file=args["mrt"], ) if args["to_json_update"]: to_json_parsed( - rib = False, - json_file = args["to_json_update"], - mrt_file = args["mrt"], + rib=False, + json_file=args["to_json_update"], + mrt_file=args["mrt"], ) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/dnas/scripts/tweet.py b/dnas/scripts/tweet.py index f05b981..1d56264 100755 --- a/dnas/scripts/tweet.py +++ b/dnas/scripts/tweet.py @@ -10,33 +10,30 @@ # Accomodate the use of the dnas library, even when the library isn't installed sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg -from dnas.log import log from dnas.git import git +from dnas.log import log from dnas.mrt_stats import mrt_stats from dnas.redis_db import redis_db from dnas.twitter import twitter from dnas.twitter_msg import twitter_msg -def delete(tweet_id: int = None): + +def delete(tweet_id: int) -> None: """ Delete a Tweet from twitter.com """ if not tweet_id: - raise ValueError( - f"Missing required arguments: tweet_id={tweet_id}" - ) + raise ValueError(f"Missing required arguments: tweet_id={tweet_id}") t = twitter() t.delete(tweet_id) -def gen_tweets_yest(): + +def gen_tweets_yest() -> None: """ Generate Tweets based on for yesterday's stats changes and publish them. """ @@ -45,21 +42,18 @@ def gen_tweets_yest(): datetime.datetime.now() - delta, cfg.DAY_FORMAT ) gen_tweets(yesterday) - tweet(False, yesterday) + tweet(ymd=yesterday, print_only=False) + -def gen_tweets(ymd: str = None): +def gen_tweets(ymd: str) -> None: """ Generate Tweets based on stats for a specific day. 
""" if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}, use --ymd" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}, use --ymd") if type(ymd) != str: - raise TypeError( - f"ymd is not an str: {type(ymd)}" - ) + raise TypeError(f"ymd is not an str: {type(ymd)}") rdb = redis_db() day_key = mrt_stats.gen_daily_key(ymd) @@ -78,26 +72,20 @@ def gen_tweets(ymd: str = None): f"{twitter_msg.gen_tweet_q_key(ymd)}" ) for msg in msg_q: - rdb.add_to_queue( - twitter_msg.gen_tweet_q_key(ymd), - msg.to_json() - ) + rdb.add_to_queue(twitter_msg.gen_tweet_q_key(ymd), msg.to_json()) rdb.close() -def tweet(print_only: bool = False, ymd: str = None): + +def tweet(ymd: str, print_only: bool = False) -> None: """ Tweet all the Tweets in the redis queue for a specific day. """ if not ymd: - raise ValueError( - f"Missing required arguments: ymd={ymd}, use --ymd" - ) + raise ValueError(f"Missing required arguments: ymd={ymd}, use --ymd") if type(ymd) != str: - raise TypeError( - f"ymd is not a string: {type(ymd)}" - ) + raise TypeError(f"ymd is not a string: {type(ymd)}") rdb = redis_db() t = twitter() @@ -113,7 +101,6 @@ def tweet(print_only: bool = False, ymd: str = None): t.tweet_hdr(thread_hdr, print_only) for tweet in tweet_q: - if tweet.hdr in [t.hdr for t in tweeted_q]: logging.debug(f"Skipping already tweeted message: {tweet.hdr}") continue @@ -124,17 +111,16 @@ def tweet(print_only: bool = False, ymd: str = None): continue rdb.add_to_queue( - twitter_msg.gen_tweeted_q_key(ymd), - tweet.to_json() + twitter_msg.gen_tweeted_q_key(ymd), tweet.to_json() ) rdb.del_from_queue( - twitter_msg.gen_tweet_q_key(ymd), - tweet.to_json() + twitter_msg.gen_tweet_q_key(ymd), tweet.to_json() ) rdb.close() -def parse_args(): + +def parse_args() -> dict: """ Parse the CLI args to this script. 
""" @@ -197,13 +183,13 @@ def parse_args(): ) return vars(parser.parse_args()) -def main(): +def main(): args = parse_args() log.setup( - debug = args["debug"], - log_src = "Tweet generation and posting script", - log_path = cfg.LOG_TWITTER, + debug=args["debug"], + log_src="Tweet generation and posting script", + log_path=cfg.LOG_TWITTER, ) if args["delete"]: @@ -218,5 +204,6 @@ def main(): if args["yesterday"]: gen_tweets_yest() -if __name__ == '__main__': - main() \ No newline at end of file + +if __name__ == "__main__": + main() diff --git a/dnas/tests/test_bogon_asn.py b/dnas/tests/test_bogon_asn.py index 2ff1b25..365fcdf 100644 --- a/dnas/tests/test_bogon_asn.py +++ b/dnas/tests/test_bogon_asn.py @@ -1,27 +1,26 @@ import os import sys +import typing import unittest sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.bogon_asn import bogon_asn -class test_bogon_asn(unittest.TestCase): +class test_bogon_asn(unittest.TestCase): ba = bogon_asn() - def test_init(self): + def test_init(self: "test_bogon_asn") -> None: self.assertIsInstance(self.ba, bogon_asn) - def test_is_bogon(self): + def test_is_bogon(self: "test_bogon_asn") -> None: self.assertRaises(TypeError, self.ba.is_bogon) self.assertRaises(TypeError, self.ba.is_bogon, "abc") self.assertEqual(self.ba.is_bogon(65535), True) self.assertEqual(self.ba.is_bogon(1234567890), False) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_bogon_attr.py b/dnas/tests/test_bogon_attr.py index 4a1c6a5..a15d350 100644 --- a/dnas/tests/test_bogon_attr.py +++ b/dnas/tests/test_bogon_attr.py @@ -1,28 +1,27 @@ import os import sys +import typing import unittest sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.bogon_attr import bogon_attr -class test_bogon_attr(unittest.TestCase): +class test_bogon_attr(unittest.TestCase): ba = bogon_attr() - def test_init(self): + def test_init(self: "test_bogon_attr") -> None: self.assertIsInstance(self.ba, bogon_attr) - def test_is_unknown(self): + def test_is_unknown(self: "test_bogon_attr") -> None: self.assertRaises(TypeError, self.ba.is_unknown) self.assertRaises(TypeError, self.ba.is_unknown, "abc") for attr in self.ba.known_attrs: self.assertEqual(self.ba.is_unknown(attr), False) self.assertEqual(self.ba.is_unknown(11), True) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_bogon_ip.py b/dnas/tests/test_bogon_ip.py index dd0d7f5..f09793f 100644 --- a/dnas/tests/test_bogon_ip.py +++ b/dnas/tests/test_bogon_ip.py @@ -1,48 +1,51 @@ import ipaddress import os import sys +import typing import unittest sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) -from dnas.config import config from dnas.bogon_ip import bogon_ip +from dnas.config import config -class test_bogon_ip(unittest.TestCase): +class test_bogon_ip(unittest.TestCase): cfg = config() bi = bogon_ip() - def test_init(self): + def test_init(self: "test_bogon_ip") -> None: self.assertIsInstance(self.bi, bogon_ip) self.assertIsInstance(self.bi.BOGON_V4_NETS, list) self.assertTrue(self.bi.BOGON_V4_NETS) - for net in self.bi.BOGON_V4_NETS: - self.assertIsInstance(net, ipaddress.IPv4Network) + for net_v4 in 
self.bi.BOGON_V4_NETS: + self.assertIsInstance(net_v4, ipaddress.IPv4Network) self.assertIsInstance(self.bi.BOGON_V6_NETS, list) self.assertTrue(self.bi.BOGON_V6_NETS) - for net in self.bi.BOGON_V6_NETS: - self.assertIsInstance(net, ipaddress.IPv6Network) + for net_v6 in self.bi.BOGON_V6_NETS: + self.assertIsInstance(net_v6, ipaddress.IPv6Network) - def test_is_v4_bogon(self): + def test_is_v4_bogon(self: "test_bogon_ip") -> None: self.assertRaises(ValueError, self.bi.is_v4_bogon, None) self.assertRaises(TypeError, self.bi.is_v4_bogon, 123) self.assertRaises(ValueError, self.bi.is_v4_bogon, "555.555.555.555") self.assertEqual(self.bi.is_v4_bogon("192.168.0.0/24"), True) self.assertEqual(self.bi.is_v4_bogon("11.22.33.0/24"), False) - def test_is_v6_bogon(self): + def test_is_v6_bogon(self: "test_bogon_ip") -> None: self.assertRaises(ValueError, self.bi.is_v6_bogon, None) self.assertRaises(TypeError, self.bi.is_v6_bogon, 123) - self.assertRaises(ValueError, self.bi.is_v6_bogon, "HHHH:HHHH:HHHH:HHHH::/64") + self.assertRaises( + ValueError, self.bi.is_v6_bogon, "HHHH:HHHH:HHHH:HHHH::/64" + ) self.assertEqual(self.bi.is_v6_bogon("2001:db8:ABCD::/48"), True) - self.assertEqual(self.bi.is_v6_bogon("ABCD:ABCD:ABCD:ABCD::/64"), False) + self.assertEqual( + self.bi.is_v6_bogon("ABCD:ABCD:ABCD:ABCD::/64"), False + ) + -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_git.py b/dnas/tests/test_git.py index cdbdb2e..aaa26c9 100644 --- a/dnas/tests/test_git.py +++ b/dnas/tests/test_git.py @@ -4,6 +4,7 @@ import shutil import subprocess import sys +import typing import unittest sys.path.append( @@ -14,12 +15,11 @@ class test_git(unittest.TestCase): - cfg = config() g = git() test_filename = "abc123" - def setUp(self): + def setUp(self: "test_git") -> None: """ Ensure a fresh local copy of the git repo is available """ @@ -30,7 +30,7 @@ def setUp(self): os.makedirs(self.cfg.GIT_BASE, exist_ok=False) self.g.clone() - def test_add(self): + def test_add(self: "test_git") -> None: self.assertRaises(ValueError, self.g.add, None) self.assertRaises(TypeError, self.g.add, 123) @@ -74,7 +74,7 @@ def test_add(self): os.remove(os.path.join(self.cfg.GIT_BASE, self.test_filename)) self.g.clear() - def test_clean(self): + def test_clean(self: "test_git") -> None: """ Add an untracked file to the repo and test it is removed """ @@ -120,7 +120,7 @@ def test_clean(self): assert self.test_filename not in ret.stdout.decode() - def test_clear(self): + def test_clear(self: "test_git") -> None: """ Stage, then de-stage a test file, and then check the git stage is empty """ @@ -146,24 +146,14 @@ def test_clear(self): cwd=self.cfg.GIT_BASE, capture_output=True, ) + assert ret.returncode == 0 - if ret.returncode != 0: - raise ChildProcessError( - f"Couldn't check git index status:\n" - f"args: {ret.args}\n" - f"stdout: {ret.stdout.decode()}\n" - f"stderr: {ret.stderr.decode()}" - ) - - self.assertTrue( - re.search(f"nothing added to commit", ret.stdout.decode()) - ) - try: - os.remove(os.path.join(self.cfg.GIT_BASE, self.test_filename)) - except FileNotFoundError: - pass + en = re.search("nothing added to commit", ret.stdout.decode()) + de = re.search("nichts zum Commit", ret.stdout.decode()) + self.assertTrue(de or en) + os.remove(os.path.join(self.cfg.GIT_BASE, self.test_filename)) - def test_clone(self): + def test_clone(self: "test_git") -> None: """ Git clone should fail if there is an existing directory, that contains a different repo. 
Create the base directory with an empty .git sub-dir
@@ -190,7 +180,7 @@ def test_clone(self):
             asserted = True
         self.assertEqual(asserted, False)

-    def test_commit(self):
+    def test_commit(self: "test_git") -> None:
         self.assertRaises(ValueError, self.g.commit, None)
         self.assertRaises(TypeError, self.g.commit, 123)

@@ -213,7 +203,7 @@ def test_commit(self):
         self.assertEqual(asserted, False)
         os.remove(os.path.join(self.cfg.GIT_BASE, self.test_filename))

-    def test_git_diff(self):
+    def test_git_diff(self: "test_git") -> None:
         self.assertFalse(self.g.diff())
         f = open(os.path.join(self.cfg.GIT_BASE, "README.md"), "a")
         f.write("abc123")
@@ -236,7 +226,7 @@ def test_git_diff(self):
                 f"stderr: {ret.stderr.decode()}"
             )

-    def test_git_exists(self):
+    def test_git_exists(self: "test_git") -> None:
         # With no directory, exists() should fail
         try:
             shutil.rmtree(self.cfg.GIT_BASE)
@@ -250,7 +240,7 @@ def test_git_exists(self):
         self.g.clone()
         self.assertTrue(self.g.git_exists())

-    def test_gen_git_path_ymd(self):
+    def test_gen_git_path_ymd(self: "test_git") -> None:
         self.assertRaises(ValueError, self.g.gen_git_path_ymd, None)
         self.assertRaises(TypeError, self.g.gen_git_path_ymd, 123)
         self.assertEqual(
@@ -258,7 +248,7 @@ def test_gen_git_path_ymd(self):
             self.cfg.GIT_BASE + "2022/04/01",
         )

-    def test_gen_git_url_ymd(self):
+    def test_gen_git_url_ymd(self: "test_git") -> None:
         self.assertRaises(ValueError, self.g.gen_git_url_ymd, None)
         self.assertRaises(TypeError, self.g.gen_git_url_ymd, 123)
         self.assertEqual(
@@ -266,7 +256,7 @@ def test_gen_git_url_ymd(self):
             self.cfg.GIT_STAT_BASE_URL + "2022/04/01",
         )

-    def test_pull(self):
+    def test_pull(self: "test_git") -> None:
         # Nothing should happen if local is up to date with remote
         self.g.clear()

@@ -277,7 +267,7 @@ def test_pull(self):
             asserted = True
         self.assertEqual(asserted, False)

-    def test_push(self):
+    def test_push(self: "test_git") -> None:
         # With no new commits to push, nothing should happen
         self.g.clear()

@@ -289,5 +279,5 @@ def test_push(self):
         self.assertEqual(asserted, False)

-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main(verbosity=2)
diff --git a/dnas/tests/test_mrt_archive.py b/dnas/tests/test_mrt_archive.py
index db05ac7..b82d00c 100644
--- a/dnas/tests/test_mrt_archive.py
+++ b/dnas/tests/test_mrt_archive.py
@@ -2,6 +2,7 @@
 import os
 import re
 import sys
+import typing
 import unittest

 sys.path.append(
@@ -12,7 +13,6 @@
 class test_mrt_archive(unittest.TestCase):
-
     cfg = config()

     AS57355_TYPE = "AS57355"
@@ -33,6 +33,7 @@ class test_mrt_archive(unittest.TestCase):
     AS57355_UPD_PREFIX = ""
     AS57355_RIB_OFFSET = 0
     AS57355_UPD_OFFSET = 120
+    AS57355_STRIP_COMM = ""

     mrt_as57355 = mrt_archive(
         TYPE=AS57355_TYPE,
@@ -53,6 +54,7 @@ class test_mrt_archive(unittest.TestCase):
         UPD_PREFIX=AS57355_UPD_PREFIX,
         RIB_OFFSET=AS57355_RIB_OFFSET,
         UPD_OFFSET=AS57355_UPD_OFFSET,
+        STRIP_COMM=AS57355_STRIP_COMM,
     )

     RV_TYPE = "RV"
@@ -73,6 +75,7 @@ class test_mrt_archive(unittest.TestCase):
     RV_UPD_PREFIX = "updates."
     RV_RIB_OFFSET = 120
     RV_UPD_OFFSET = 120
+    RV_STRIP_COMM = ""

     mrt_rv = mrt_archive(
         TYPE=RV_TYPE,
@@ -93,6 +96,7 @@ class test_mrt_archive(unittest.TestCase):
         UPD_PREFIX=RV_UPD_PREFIX,
         RIB_OFFSET=RV_RIB_OFFSET,
         UPD_OFFSET=RV_UPD_OFFSET,
+        STRIP_COMM=RV_STRIP_COMM,
     )

     RIPE_TYPE = "RIPE"
@@ -113,6 +117,7 @@ class test_mrt_archive(unittest.TestCase):
     RIPE_UPD_PREFIX = "updates."
RIPE_RIB_OFFSET = 0 RIPE_UPD_OFFSET = 120 + RIPE_STRIP_COMM = "" mrt_ripe = mrt_archive( TYPE=RIPE_TYPE, @@ -133,13 +138,13 @@ class test_mrt_archive(unittest.TestCase): UPD_PREFIX=RIPE_UPD_PREFIX, RIB_OFFSET=RIPE_RIB_OFFSET, UPD_OFFSET=RIPE_UPD_OFFSET, + STRIP_COMM=RIPE_STRIP_COMM, ) - def test_init(self): - + def test_init(self: "test_mrt_archive") -> None: with self.assertRaises(TypeError): fail = mrt_archive( - TYPE=123, + TYPE=123, # type: ignore NAME=self.RIPE_NAME, ENABLED=self.RIPE_ENABLED, BASE_URL=self.RIPE_BASE_URL, @@ -157,11 +162,12 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( TYPE=self.RIPE_TYPE, - NAME=123, + NAME=123, # type: ignore ENABLED=self.RIPE_ENABLED, BASE_URL=self.RIPE_BASE_URL, RIB_URL=self.RIPE_RIB_URL, @@ -178,12 +184,13 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( TYPE=self.RIPE_TYPE, NAME=self.RIPE_NAME, - ENABLED=123.456, + ENABLED=123.456, # type: ignore BASE_URL=self.RIPE_BASE_URL, RIB_URL=self.RIPE_RIB_URL, UPD_URL=self.RIPE_UPD_URL, @@ -199,13 +206,14 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( TYPE=self.RIPE_TYPE, NAME=self.RIPE_NAME, ENABLED=self.RIPE_ENABLED, - BASE_URL=123, + BASE_URL=123, # type: ignore RIB_URL=self.RIPE_RIB_URL, UPD_URL=self.RIPE_UPD_URL, MRT_EXT=self.RIPE_MRT_EXT, @@ -220,6 +228,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -227,7 +236,7 @@ def test_init(self): NAME=self.RIPE_NAME, ENABLED=self.RIPE_ENABLED, BASE_URL=self.RIPE_BASE_URL, - RIB_URL=123, + RIB_URL=123, # type: ignore UPD_URL=self.RIPE_UPD_URL, MRT_EXT=self.RIPE_MRT_EXT, MRT_DIR=self.RIPE_MRT_DIR, @@ -241,6 +250,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -249,7 +259,7 @@ def test_init(self): ENABLED=self.RIPE_ENABLED, BASE_URL=self.RIPE_BASE_URL, RIB_URL=self.RIPE_RIB_URL, - UPD_URL=123, + UPD_URL=123, # type: ignore MRT_EXT=self.RIPE_MRT_EXT, MRT_DIR=self.RIPE_MRT_DIR, RIB_GLOB=self.RIPE_RIB_GLOB, @@ -262,6 +272,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -271,7 +282,7 @@ def test_init(self): BASE_URL=self.RIPE_BASE_URL, RIB_URL=self.RIPE_RIB_URL, UPD_URL=self.RIPE_UPD_URL, - MRT_EXT=123, + MRT_EXT=123, # type: ignore MRT_DIR=self.RIPE_MRT_DIR, RIB_GLOB=self.RIPE_RIB_GLOB, UPD_GLOB=self.RIPE_UPD_GLOB, @@ -283,6 +294,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -293,7 +305,7 @@ def test_init(self): RIB_URL=self.RIPE_RIB_URL, UPD_URL=self.RIPE_UPD_URL, MRT_EXT=self.RIPE_MRT_EXT, - 
MRT_DIR=123, + MRT_DIR=123, # type: ignore RIB_GLOB=self.RIPE_RIB_GLOB, UPD_GLOB=self.RIPE_UPD_GLOB, RIB_KEY=self.RIPE_RIB_KEY, @@ -304,6 +316,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -315,7 +328,7 @@ def test_init(self): UPD_URL=self.RIPE_UPD_URL, MRT_EXT=self.RIPE_MRT_EXT, MRT_DIR=self.RIPE_MRT_DIR, - RIB_GLOB=123, + RIB_GLOB=123, # type: ignore UPD_GLOB=self.RIPE_UPD_GLOB, RIB_KEY=self.RIPE_RIB_KEY, UPD_KEY=self.RIPE_UPD_KEY, @@ -325,6 +338,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -337,7 +351,7 @@ def test_init(self): MRT_EXT=self.RIPE_MRT_EXT, MRT_DIR=self.RIPE_MRT_DIR, RIB_GLOB=self.RIPE_RIB_GLOB, - UPD_GLOB=123, + UPD_GLOB=123, # type: ignore RIB_KEY=self.RIPE_RIB_KEY, UPD_KEY=self.RIPE_UPD_KEY, RIB_INTERVAL=self.RIPE_RIB_INTERVAL, @@ -346,6 +360,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) with self.assertRaises(TypeError): fail = mrt_archive( @@ -359,7 +374,7 @@ def test_init(self): MRT_DIR=self.RIPE_MRT_DIR, RIB_GLOB=self.RIPE_RIB_GLOB, UPD_GLOB=self.RIPE_UPD_GLOB, - RIB_KEY=123, + RIB_KEY=123, # type: ignore UPD_KEY=self.RIPE_UPD_KEY, RIB_INTERVAL=self.RIPE_RIB_INTERVAL, UPD_INTERVAL=self.RIPE_UPD_INTERVAL, @@ -367,6 +382,7 @@ def test_init(self): UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -380,13 +396,14 @@ def test_init(self): RIB_GLOB=self.RIPE_RIB_GLOB, UPD_GLOB=self.RIPE_UPD_GLOB, RIB_KEY=self.RIPE_RIB_KEY, - UPD_KEY=123, + UPD_KEY=123, # type: ignore RIB_INTERVAL=self.RIPE_RIB_INTERVAL, UPD_INTERVAL=self.RIPE_UPD_INTERVAL, RIB_PREFIX=self.RIPE_RIB_PREFIX, UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -401,12 +418,13 @@ def test_init(self): UPD_GLOB=self.RIPE_UPD_GLOB, RIB_KEY=self.RIPE_RIB_KEY, UPD_KEY=self.RIPE_UPD_KEY, - RIB_INTERVAL="abc", + RIB_INTERVAL="abc", # type: ignore UPD_INTERVAL=self.RIPE_UPD_INTERVAL, RIB_PREFIX=self.RIPE_RIB_PREFIX, UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -422,11 +440,12 @@ def test_init(self): RIB_KEY=self.RIPE_RIB_KEY, UPD_KEY=self.RIPE_UPD_KEY, RIB_INTERVAL=self.RIPE_RIB_INTERVAL, - UPD_INTERVAL="abc", + UPD_INTERVAL="abc", # type: ignore RIB_PREFIX=self.RIPE_RIB_PREFIX, UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -443,10 +462,11 @@ def test_init(self): UPD_KEY=self.RIPE_UPD_KEY, RIB_INTERVAL=self.RIPE_RIB_INTERVAL, UPD_INTERVAL=self.RIPE_UPD_INTERVAL, - RIB_PREFIX=123, + RIB_PREFIX=123, # type: ignore UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -464,9 +484,10 @@ def test_init(self): 
RIB_INTERVAL=self.RIPE_RIB_INTERVAL, UPD_INTERVAL=self.RIPE_UPD_INTERVAL, RIB_PREFIX=self.RIPE_RIB_PREFIX, - UPD_PREFIX=123, + UPD_PREFIX=123, # type: ignore RIB_OFFSET=self.RIPE_RIB_OFFSET, UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -485,8 +506,30 @@ def test_init(self): UPD_INTERVAL=self.RIPE_UPD_INTERVAL, RIB_PREFIX=self.RIPE_RIB_PREFIX, UPD_PREFIX=self.RIPE_UPD_PREFIX, - RIB_OFFSET="abc", + RIB_OFFSET="abc", # type: ignore UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=self.RIPE_STRIP_COMM, + ) + fail = mrt_archive( + TYPE=self.RIPE_TYPE, + NAME=self.RIPE_NAME, + ENABLED=self.RIPE_ENABLED, + BASE_URL=self.RIPE_BASE_URL, + RIB_URL=self.RIPE_RIB_URL, + UPD_URL=self.RIPE_UPD_URL, + MRT_EXT=self.RIPE_MRT_EXT, + MRT_DIR=self.RIPE_MRT_DIR, + RIB_GLOB=self.RIPE_RIB_GLOB, + UPD_GLOB=self.RIPE_UPD_GLOB, + RIB_KEY=self.RIPE_RIB_KEY, + UPD_KEY=self.RIPE_UPD_KEY, + RIB_INTERVAL=self.RIPE_RIB_INTERVAL, + UPD_INTERVAL=self.RIPE_UPD_INTERVAL, + RIB_PREFIX=self.RIPE_RIB_PREFIX, + UPD_PREFIX=self.RIPE_UPD_PREFIX, + RIB_OFFSET=self.RIPE_RIB_OFFSET, + UPD_OFFSET="abc", # type: ignore + STRIP_COMM=self.RIPE_STRIP_COMM, ) fail = mrt_archive( TYPE=self.RIPE_TYPE, @@ -506,7 +549,8 @@ def test_init(self): RIB_PREFIX=self.RIPE_RIB_PREFIX, UPD_PREFIX=self.RIPE_UPD_PREFIX, RIB_OFFSET=self.RIPE_RIB_OFFSET, - UPD_OFFSET="abc", + UPD_OFFSET=self.RIPE_UPD_OFFSET, + STRIP_COMM=False, # type: ignore ) self.assertIsInstance(self.mrt_rv, mrt_archive) @@ -528,8 +572,9 @@ def test_init(self): self.assertIsInstance(self.mrt_rv.UPD_PREFIX, str) self.assertIsInstance(self.mrt_rv.RIB_OFFSET, int) self.assertIsInstance(self.mrt_rv.UPD_OFFSET, int) + self.assertIsInstance(self.mrt_rv.STRIP_COMM, str) - def test_concat_url(self): + def test_concat_url(self: "test_mrt_archive") -> None: url = "http://www.example.tld/path/to/file.abc" self.assertEqual( self.mrt_ripe.concat_url( @@ -571,7 +616,7 @@ def test_concat_url(self): near midnight). 
""" - def test_gen_latest_rib_fn(self): + def test_gen_latest_rib_fn(self: "test_mrt_archive") -> None: """ Check that the wrapper function calls the correct child function, and that an error is thrown if there is no matching child function: @@ -594,7 +639,7 @@ def test_gen_latest_rib_fn(self): self.assertRaises(ValueError, self.mrt_rv.gen_latest_rib_fn) self.mrt_rv.TYPE = self.RV_TYPE - def test_gen_latest_rib_fn_as57355(self): + def test_gen_latest_rib_fn_as57355(self: "test_mrt_archive") -> None: rib_name = self.mrt_as57355.gen_latest_rib_fn_as57355() regex = ( rf"{self.mrt_as57355.RIB_PREFIX}" @@ -610,7 +655,7 @@ def test_gen_latest_rib_fn_as57355(self): ) self.assertTrue(mins % self.mrt_as57355.RIB_INTERVAL == 0) - def test_gen_latest_rib_fn_ripe(self): + def test_gen_latest_rib_fn_ripe(self: "test_mrt_archive") -> None: rib_name = self.mrt_ripe.gen_latest_rib_fn_ripe() regex = ( rf"{self.mrt_ripe.RIB_PREFIX}" @@ -626,7 +671,7 @@ def test_gen_latest_rib_fn_ripe(self): ) self.assertTrue(mins % self.mrt_ripe.RIB_INTERVAL == 0) - def test_gen_latest_rib_fn_rv(self): + def test_gen_latest_rib_fn_rv(self: "test_mrt_archive") -> None: rib_name = self.mrt_rv.gen_latest_rib_fn_rv() regex = ( rf"{self.mrt_rv.RIB_PREFIX}" @@ -642,7 +687,7 @@ def test_gen_latest_rib_fn_rv(self): ) self.assertTrue(mins % self.mrt_rv.RIB_INTERVAL == 0) - def test_gen_latest_upd_fn(self): + def test_gen_latest_upd_fn(self: "test_mrt_archive") -> None: """ Check that the wrapper function calls the correct child function, and that an error is thrown if there is no matching child function: @@ -665,7 +710,7 @@ def test_gen_latest_upd_fn(self): self.assertRaises(ValueError, self.mrt_rv.gen_latest_upd_fn) self.mrt_rv.TYPE = self.RV_TYPE - def test_gen_latest_upd_fn_as57355(self): + def test_gen_latest_upd_fn_as57355(self: "test_mrt_archive") -> None: upd_name = self.mrt_as57355.gen_latest_upd_fn_as57355() regex = ( rf"{self.mrt_as57355.UPD_PREFIX}" @@ -681,7 +726,7 @@ def test_gen_latest_upd_fn_as57355(self): ) self.assertTrue(mins % self.mrt_as57355.UPD_INTERVAL == 0) - def test_gen_latest_upd_fn_ripe(self): + def test_gen_latest_upd_fn_ripe(self: "test_mrt_archive") -> None: upd_name = self.mrt_ripe.gen_latest_upd_fn_ripe() regex = ( rf"{self.mrt_ripe.UPD_PREFIX}" @@ -697,7 +742,7 @@ def test_gen_latest_upd_fn_ripe(self): ) self.assertTrue(mins % self.mrt_ripe.UPD_INTERVAL == 0) - def test_gen_latest_upd_fn_rv(self): + def test_gen_latest_upd_fn_rv(self: "test_mrt_archive") -> None: upd_name = self.mrt_rv.gen_latest_upd_fn_rv() regex = ( rf"{self.mrt_rv.UPD_PREFIX}" @@ -713,7 +758,7 @@ def test_gen_latest_upd_fn_rv(self): ) self.assertTrue(mins % self.mrt_rv.UPD_INTERVAL == 0) - def test_gen_rib_fn_date(self): + def test_gen_rib_fn_date(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.gen_rib_fn_date("20220101.0000"), "rib.20220101.0000.dump", @@ -727,51 +772,51 @@ def test_gen_rib_fn_date(self): "rib.20220101.0000.bz2", ) - def test_gen_rib_fns_day(self): + def test_gen_rib_fns_day(self: "test_mrt_archive") -> None: as57355_20220101 = [ - 'rib.20220101.0000.dump', - 'rib.20220101.0100.dump', - 'rib.20220101.0200.dump', - 'rib.20220101.0300.dump', - 'rib.20220101.0400.dump', - 'rib.20220101.0500.dump', - 'rib.20220101.0600.dump', - 'rib.20220101.0700.dump', - 'rib.20220101.0800.dump', - 'rib.20220101.0900.dump', - 'rib.20220101.1000.dump', - 'rib.20220101.1100.dump', - 'rib.20220101.1200.dump', - 'rib.20220101.1300.dump', - 'rib.20220101.1400.dump', - 'rib.20220101.1500.dump', - 
'rib.20220101.1600.dump', - 'rib.20220101.1700.dump', - 'rib.20220101.1800.dump', - 'rib.20220101.1900.dump', - 'rib.20220101.2000.dump', - 'rib.20220101.2100.dump', - 'rib.20220101.2200.dump', - 'rib.20220101.2300.dump', + "rib.20220101.0000.dump", + "rib.20220101.0100.dump", + "rib.20220101.0200.dump", + "rib.20220101.0300.dump", + "rib.20220101.0400.dump", + "rib.20220101.0500.dump", + "rib.20220101.0600.dump", + "rib.20220101.0700.dump", + "rib.20220101.0800.dump", + "rib.20220101.0900.dump", + "rib.20220101.1000.dump", + "rib.20220101.1100.dump", + "rib.20220101.1200.dump", + "rib.20220101.1300.dump", + "rib.20220101.1400.dump", + "rib.20220101.1500.dump", + "rib.20220101.1600.dump", + "rib.20220101.1700.dump", + "rib.20220101.1800.dump", + "rib.20220101.1900.dump", + "rib.20220101.2000.dump", + "rib.20220101.2100.dump", + "rib.20220101.2200.dump", + "rib.20220101.2300.dump", ] ripe_20220101 = [ - 'bview.20220101.0000.gz', - 'bview.20220101.0800.gz', - 'bview.20220101.1600.gz', + "bview.20220101.0000.gz", + "bview.20220101.0800.gz", + "bview.20220101.1600.gz", ] rv_20220101 = [ - 'rib.20220101.0000.bz2', - 'rib.20220101.0200.bz2', - 'rib.20220101.0400.bz2', - 'rib.20220101.0600.bz2', - 'rib.20220101.0800.bz2', - 'rib.20220101.1000.bz2', - 'rib.20220101.1200.bz2', - 'rib.20220101.1400.bz2', - 'rib.20220101.1600.bz2', - 'rib.20220101.1800.bz2', - 'rib.20220101.2000.bz2', - 'rib.20220101.2200.bz2', + "rib.20220101.0000.bz2", + "rib.20220101.0200.bz2", + "rib.20220101.0400.bz2", + "rib.20220101.0600.bz2", + "rib.20220101.0800.bz2", + "rib.20220101.1000.bz2", + "rib.20220101.1200.bz2", + "rib.20220101.1400.bz2", + "rib.20220101.1600.bz2", + "rib.20220101.1800.bz2", + "rib.20220101.2000.bz2", + "rib.20220101.2200.bz2", ] self.assertEqual( self.mrt_as57355.gen_rib_fns_day("20220101"), as57355_20220101 @@ -781,17 +826,17 @@ def test_gen_rib_fns_day(self): ) self.assertEqual(self.mrt_rv.gen_rib_fns_day("20220101"), rv_20220101) - def test_gen_rib_fns_range(self): + def test_gen_rib_fns_range(self: "test_mrt_archive") -> None: as57355_20220101 = [ - 'rib.20220101.2300.dump', - 'rib.20220102.0000.dump', - 'rib.20220102.0100.dump', + "rib.20220101.2300.dump", + "rib.20220102.0000.dump", + "rib.20220102.0100.dump", ] - ripe_20220101 = ['bview.20220102.0000.gz', 'bview.20220102.0800.gz'] + ripe_20220101 = ["bview.20220102.0000.gz", "bview.20220102.0800.gz"] rv_20220101 = [ - 'rib.20220102.0000.bz2', - 'rib.20220102.0200.bz2', - 'rib.20220102.0400.bz2', + "rib.20220102.0000.bz2", + "rib.20220102.0200.bz2", + "rib.20220102.0400.bz2", ] self.assertEqual( self.mrt_as57355.gen_rib_fns_range( @@ -812,7 +857,7 @@ def test_gen_rib_fns_range(self): rv_20220101, ) - def test_gen_rib_key(self): + def test_gen_rib_key(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.gen_rib_key("20220101"), "UNIT_TEST_AS57355_RIB:20220101", @@ -828,7 +873,7 @@ def test_gen_rib_key(self): self.assertRaises(ValueError, self.mrt_rv.gen_rib_key, "") self.assertRaises(TypeError, self.mrt_rv.gen_rib_key, 123) - def test_gen_rib_url(self): + def test_gen_rib_url(self: "test_mrt_archive") -> None: """ Check that the wrapper function calls the correct child function, and that an error is thrown if there is no matching child function: @@ -849,10 +894,10 @@ def test_gen_rib_url(self): ) self.mrt_rv.TYPE = "HcSHqWb3C9i2jZqnzVj1" - self.assertRaises(ValueError, self.mrt_rv.gen_rib_url) + self.assertRaises(ValueError, self.mrt_rv.gen_rib_url, "") self.mrt_rv.TYPE = self.RV_TYPE - def test_gen_rib_url_as57355(self): + 
def test_gen_rib_url_as57355(self: "test_mrt_archive") -> None: as57355_url = ( "http://172.17.0.1:8000/lukasz/rib/rib.20220101.0000.dump" ) @@ -861,7 +906,7 @@ def test_gen_rib_url_as57355(self): as57355_url, ) - def test_gen_rib_url_ripe(self): + def test_gen_rib_url_ripe(self: "test_mrt_archive") -> None: ripe_url = ( "https://data.ris.ripe.net/rrc23/2022.01/bview.20220101.0000.gz" ) @@ -869,7 +914,7 @@ def test_gen_rib_url_ripe(self): self.mrt_ripe.gen_rib_url_ripe("bview.20220101.0000.gz"), ripe_url ) - def test_gen_rib_url_rv(self): + def test_gen_rib_url_rv(self: "test_mrt_archive") -> None: rv_url = ( "http://archive.routeviews.org/route-views.linx/bgpdata/2022.01/" "RIBS/rib.20220101.0000.bz2" @@ -878,11 +923,11 @@ def test_gen_rib_url_rv(self): self.mrt_rv.gen_rib_url_rv("rib.20220101.0000.bz2"), rv_url ) - def test_gen_rib_url_range(self): + def test_gen_rib_url_range(self: "test_mrt_archive") -> None: as57355_urls = [ - 'http://172.17.0.1:8000/lukasz/rib/rib.20220101.2300.dump', - 'http://172.17.0.1:8000/lukasz/rib/rib.20220102.0000.dump', - 'http://172.17.0.1:8000/lukasz/rib/rib.20220102.0100.dump', + "http://172.17.0.1:8000/lukasz/rib/rib.20220101.2300.dump", + "http://172.17.0.1:8000/lukasz/rib/rib.20220102.0000.dump", + "http://172.17.0.1:8000/lukasz/rib/rib.20220102.0100.dump", ] self.assertEqual( self.mrt_as57355.gen_rib_url_range( @@ -891,7 +936,7 @@ def test_gen_rib_url_range(self): as57355_urls, ) ripe_urls = [ - 'https://data.ris.ripe.net/rrc23/2022.01/bview.20220102.0000.gz' + "https://data.ris.ripe.net/rrc23/2022.01/bview.20220102.0000.gz" ] self.assertEqual( self.mrt_ripe.gen_rib_url_range( @@ -912,7 +957,7 @@ def test_gen_rib_url_range(self): rv_urls, ) - def test_gen_upd_fn_date(self): + def test_gen_upd_fn_date(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.gen_upd_fn_date("20220101.0000"), "20220101.0000.dump", @@ -929,540 +974,540 @@ def test_gen_upd_fn_date(self): self.assertRaises(ValueError, self.mrt_rv.gen_upd_fn_date, "") self.assertRaises(TypeError, self.mrt_rv.gen_upd_fn_date, 123) - def test_gen_upd_fns_day(self): + def test_gen_upd_fns_day(self: "test_mrt_archive") -> None: as57355_20220101 = [ - '20220101.0000.dump', - '20220101.0010.dump', - '20220101.0020.dump', - '20220101.0030.dump', - '20220101.0040.dump', - '20220101.0050.dump', - '20220101.0100.dump', - '20220101.0110.dump', - '20220101.0120.dump', - '20220101.0130.dump', - '20220101.0140.dump', - '20220101.0150.dump', - '20220101.0200.dump', - '20220101.0210.dump', - '20220101.0220.dump', - '20220101.0230.dump', - '20220101.0240.dump', - '20220101.0250.dump', - '20220101.0300.dump', - '20220101.0310.dump', - '20220101.0320.dump', - '20220101.0330.dump', - '20220101.0340.dump', - '20220101.0350.dump', - '20220101.0400.dump', - '20220101.0410.dump', - '20220101.0420.dump', - '20220101.0430.dump', - '20220101.0440.dump', - '20220101.0450.dump', - '20220101.0500.dump', - '20220101.0510.dump', - '20220101.0520.dump', - '20220101.0530.dump', - '20220101.0540.dump', - '20220101.0550.dump', - '20220101.0600.dump', - '20220101.0610.dump', - '20220101.0620.dump', - '20220101.0630.dump', - '20220101.0640.dump', - '20220101.0650.dump', - '20220101.0700.dump', - '20220101.0710.dump', - '20220101.0720.dump', - '20220101.0730.dump', - '20220101.0740.dump', - '20220101.0750.dump', - '20220101.0800.dump', - '20220101.0810.dump', - '20220101.0820.dump', - '20220101.0830.dump', - '20220101.0840.dump', - '20220101.0850.dump', - '20220101.0900.dump', - '20220101.0910.dump', - 
'20220101.0920.dump', - '20220101.0930.dump', - '20220101.0940.dump', - '20220101.0950.dump', - '20220101.1000.dump', - '20220101.1010.dump', - '20220101.1020.dump', - '20220101.1030.dump', - '20220101.1040.dump', - '20220101.1050.dump', - '20220101.1100.dump', - '20220101.1110.dump', - '20220101.1120.dump', - '20220101.1130.dump', - '20220101.1140.dump', - '20220101.1150.dump', - '20220101.1200.dump', - '20220101.1210.dump', - '20220101.1220.dump', - '20220101.1230.dump', - '20220101.1240.dump', - '20220101.1250.dump', - '20220101.1300.dump', - '20220101.1310.dump', - '20220101.1320.dump', - '20220101.1330.dump', - '20220101.1340.dump', - '20220101.1350.dump', - '20220101.1400.dump', - '20220101.1410.dump', - '20220101.1420.dump', - '20220101.1430.dump', - '20220101.1440.dump', - '20220101.1450.dump', - '20220101.1500.dump', - '20220101.1510.dump', - '20220101.1520.dump', - '20220101.1530.dump', - '20220101.1540.dump', - '20220101.1550.dump', - '20220101.1600.dump', - '20220101.1610.dump', - '20220101.1620.dump', - '20220101.1630.dump', - '20220101.1640.dump', - '20220101.1650.dump', - '20220101.1700.dump', - '20220101.1710.dump', - '20220101.1720.dump', - '20220101.1730.dump', - '20220101.1740.dump', - '20220101.1750.dump', - '20220101.1800.dump', - '20220101.1810.dump', - '20220101.1820.dump', - '20220101.1830.dump', - '20220101.1840.dump', - '20220101.1850.dump', - '20220101.1900.dump', - '20220101.1910.dump', - '20220101.1920.dump', - '20220101.1930.dump', - '20220101.1940.dump', - '20220101.1950.dump', - '20220101.2000.dump', - '20220101.2010.dump', - '20220101.2020.dump', - '20220101.2030.dump', - '20220101.2040.dump', - '20220101.2050.dump', - '20220101.2100.dump', - '20220101.2110.dump', - '20220101.2120.dump', - '20220101.2130.dump', - '20220101.2140.dump', - '20220101.2150.dump', - '20220101.2200.dump', - '20220101.2210.dump', - '20220101.2220.dump', - '20220101.2230.dump', - '20220101.2240.dump', - '20220101.2250.dump', - '20220101.2300.dump', - '20220101.2310.dump', - '20220101.2320.dump', - '20220101.2330.dump', - '20220101.2340.dump', - '20220101.2350.dump', + "20220101.0000.dump", + "20220101.0010.dump", + "20220101.0020.dump", + "20220101.0030.dump", + "20220101.0040.dump", + "20220101.0050.dump", + "20220101.0100.dump", + "20220101.0110.dump", + "20220101.0120.dump", + "20220101.0130.dump", + "20220101.0140.dump", + "20220101.0150.dump", + "20220101.0200.dump", + "20220101.0210.dump", + "20220101.0220.dump", + "20220101.0230.dump", + "20220101.0240.dump", + "20220101.0250.dump", + "20220101.0300.dump", + "20220101.0310.dump", + "20220101.0320.dump", + "20220101.0330.dump", + "20220101.0340.dump", + "20220101.0350.dump", + "20220101.0400.dump", + "20220101.0410.dump", + "20220101.0420.dump", + "20220101.0430.dump", + "20220101.0440.dump", + "20220101.0450.dump", + "20220101.0500.dump", + "20220101.0510.dump", + "20220101.0520.dump", + "20220101.0530.dump", + "20220101.0540.dump", + "20220101.0550.dump", + "20220101.0600.dump", + "20220101.0610.dump", + "20220101.0620.dump", + "20220101.0630.dump", + "20220101.0640.dump", + "20220101.0650.dump", + "20220101.0700.dump", + "20220101.0710.dump", + "20220101.0720.dump", + "20220101.0730.dump", + "20220101.0740.dump", + "20220101.0750.dump", + "20220101.0800.dump", + "20220101.0810.dump", + "20220101.0820.dump", + "20220101.0830.dump", + "20220101.0840.dump", + "20220101.0850.dump", + "20220101.0900.dump", + "20220101.0910.dump", + "20220101.0920.dump", + "20220101.0930.dump", + "20220101.0940.dump", + "20220101.0950.dump", + 
"20220101.1000.dump", + "20220101.1010.dump", + "20220101.1020.dump", + "20220101.1030.dump", + "20220101.1040.dump", + "20220101.1050.dump", + "20220101.1100.dump", + "20220101.1110.dump", + "20220101.1120.dump", + "20220101.1130.dump", + "20220101.1140.dump", + "20220101.1150.dump", + "20220101.1200.dump", + "20220101.1210.dump", + "20220101.1220.dump", + "20220101.1230.dump", + "20220101.1240.dump", + "20220101.1250.dump", + "20220101.1300.dump", + "20220101.1310.dump", + "20220101.1320.dump", + "20220101.1330.dump", + "20220101.1340.dump", + "20220101.1350.dump", + "20220101.1400.dump", + "20220101.1410.dump", + "20220101.1420.dump", + "20220101.1430.dump", + "20220101.1440.dump", + "20220101.1450.dump", + "20220101.1500.dump", + "20220101.1510.dump", + "20220101.1520.dump", + "20220101.1530.dump", + "20220101.1540.dump", + "20220101.1550.dump", + "20220101.1600.dump", + "20220101.1610.dump", + "20220101.1620.dump", + "20220101.1630.dump", + "20220101.1640.dump", + "20220101.1650.dump", + "20220101.1700.dump", + "20220101.1710.dump", + "20220101.1720.dump", + "20220101.1730.dump", + "20220101.1740.dump", + "20220101.1750.dump", + "20220101.1800.dump", + "20220101.1810.dump", + "20220101.1820.dump", + "20220101.1830.dump", + "20220101.1840.dump", + "20220101.1850.dump", + "20220101.1900.dump", + "20220101.1910.dump", + "20220101.1920.dump", + "20220101.1930.dump", + "20220101.1940.dump", + "20220101.1950.dump", + "20220101.2000.dump", + "20220101.2010.dump", + "20220101.2020.dump", + "20220101.2030.dump", + "20220101.2040.dump", + "20220101.2050.dump", + "20220101.2100.dump", + "20220101.2110.dump", + "20220101.2120.dump", + "20220101.2130.dump", + "20220101.2140.dump", + "20220101.2150.dump", + "20220101.2200.dump", + "20220101.2210.dump", + "20220101.2220.dump", + "20220101.2230.dump", + "20220101.2240.dump", + "20220101.2250.dump", + "20220101.2300.dump", + "20220101.2310.dump", + "20220101.2320.dump", + "20220101.2330.dump", + "20220101.2340.dump", + "20220101.2350.dump", ] ripe_20220101 = [ - 'updates.20220101.0000.gz', - 'updates.20220101.0005.gz', - 'updates.20220101.0010.gz', - 'updates.20220101.0015.gz', - 'updates.20220101.0020.gz', - 'updates.20220101.0025.gz', - 'updates.20220101.0030.gz', - 'updates.20220101.0035.gz', - 'updates.20220101.0040.gz', - 'updates.20220101.0045.gz', - 'updates.20220101.0050.gz', - 'updates.20220101.0055.gz', - 'updates.20220101.0100.gz', - 'updates.20220101.0105.gz', - 'updates.20220101.0110.gz', - 'updates.20220101.0115.gz', - 'updates.20220101.0120.gz', - 'updates.20220101.0125.gz', - 'updates.20220101.0130.gz', - 'updates.20220101.0135.gz', - 'updates.20220101.0140.gz', - 'updates.20220101.0145.gz', - 'updates.20220101.0150.gz', - 'updates.20220101.0155.gz', - 'updates.20220101.0200.gz', - 'updates.20220101.0205.gz', - 'updates.20220101.0210.gz', - 'updates.20220101.0215.gz', - 'updates.20220101.0220.gz', - 'updates.20220101.0225.gz', - 'updates.20220101.0230.gz', - 'updates.20220101.0235.gz', - 'updates.20220101.0240.gz', - 'updates.20220101.0245.gz', - 'updates.20220101.0250.gz', - 'updates.20220101.0255.gz', - 'updates.20220101.0300.gz', - 'updates.20220101.0305.gz', - 'updates.20220101.0310.gz', - 'updates.20220101.0315.gz', - 'updates.20220101.0320.gz', - 'updates.20220101.0325.gz', - 'updates.20220101.0330.gz', - 'updates.20220101.0335.gz', - 'updates.20220101.0340.gz', - 'updates.20220101.0345.gz', - 'updates.20220101.0350.gz', - 'updates.20220101.0355.gz', - 'updates.20220101.0400.gz', - 'updates.20220101.0405.gz', - 
'updates.20220101.0410.gz', - 'updates.20220101.0415.gz', - 'updates.20220101.0420.gz', - 'updates.20220101.0425.gz', - 'updates.20220101.0430.gz', - 'updates.20220101.0435.gz', - 'updates.20220101.0440.gz', - 'updates.20220101.0445.gz', - 'updates.20220101.0450.gz', - 'updates.20220101.0455.gz', - 'updates.20220101.0500.gz', - 'updates.20220101.0505.gz', - 'updates.20220101.0510.gz', - 'updates.20220101.0515.gz', - 'updates.20220101.0520.gz', - 'updates.20220101.0525.gz', - 'updates.20220101.0530.gz', - 'updates.20220101.0535.gz', - 'updates.20220101.0540.gz', - 'updates.20220101.0545.gz', - 'updates.20220101.0550.gz', - 'updates.20220101.0555.gz', - 'updates.20220101.0600.gz', - 'updates.20220101.0605.gz', - 'updates.20220101.0610.gz', - 'updates.20220101.0615.gz', - 'updates.20220101.0620.gz', - 'updates.20220101.0625.gz', - 'updates.20220101.0630.gz', - 'updates.20220101.0635.gz', - 'updates.20220101.0640.gz', - 'updates.20220101.0645.gz', - 'updates.20220101.0650.gz', - 'updates.20220101.0655.gz', - 'updates.20220101.0700.gz', - 'updates.20220101.0705.gz', - 'updates.20220101.0710.gz', - 'updates.20220101.0715.gz', - 'updates.20220101.0720.gz', - 'updates.20220101.0725.gz', - 'updates.20220101.0730.gz', - 'updates.20220101.0735.gz', - 'updates.20220101.0740.gz', - 'updates.20220101.0745.gz', - 'updates.20220101.0750.gz', - 'updates.20220101.0755.gz', - 'updates.20220101.0800.gz', - 'updates.20220101.0805.gz', - 'updates.20220101.0810.gz', - 'updates.20220101.0815.gz', - 'updates.20220101.0820.gz', - 'updates.20220101.0825.gz', - 'updates.20220101.0830.gz', - 'updates.20220101.0835.gz', - 'updates.20220101.0840.gz', - 'updates.20220101.0845.gz', - 'updates.20220101.0850.gz', - 'updates.20220101.0855.gz', - 'updates.20220101.0900.gz', - 'updates.20220101.0905.gz', - 'updates.20220101.0910.gz', - 'updates.20220101.0915.gz', - 'updates.20220101.0920.gz', - 'updates.20220101.0925.gz', - 'updates.20220101.0930.gz', - 'updates.20220101.0935.gz', - 'updates.20220101.0940.gz', - 'updates.20220101.0945.gz', - 'updates.20220101.0950.gz', - 'updates.20220101.0955.gz', - 'updates.20220101.1000.gz', - 'updates.20220101.1005.gz', - 'updates.20220101.1010.gz', - 'updates.20220101.1015.gz', - 'updates.20220101.1020.gz', - 'updates.20220101.1025.gz', - 'updates.20220101.1030.gz', - 'updates.20220101.1035.gz', - 'updates.20220101.1040.gz', - 'updates.20220101.1045.gz', - 'updates.20220101.1050.gz', - 'updates.20220101.1055.gz', - 'updates.20220101.1100.gz', - 'updates.20220101.1105.gz', - 'updates.20220101.1110.gz', - 'updates.20220101.1115.gz', - 'updates.20220101.1120.gz', - 'updates.20220101.1125.gz', - 'updates.20220101.1130.gz', - 'updates.20220101.1135.gz', - 'updates.20220101.1140.gz', - 'updates.20220101.1145.gz', - 'updates.20220101.1150.gz', - 'updates.20220101.1155.gz', - 'updates.20220101.1200.gz', - 'updates.20220101.1205.gz', - 'updates.20220101.1210.gz', - 'updates.20220101.1215.gz', - 'updates.20220101.1220.gz', - 'updates.20220101.1225.gz', - 'updates.20220101.1230.gz', - 'updates.20220101.1235.gz', - 'updates.20220101.1240.gz', - 'updates.20220101.1245.gz', - 'updates.20220101.1250.gz', - 'updates.20220101.1255.gz', - 'updates.20220101.1300.gz', - 'updates.20220101.1305.gz', - 'updates.20220101.1310.gz', - 'updates.20220101.1315.gz', - 'updates.20220101.1320.gz', - 'updates.20220101.1325.gz', - 'updates.20220101.1330.gz', - 'updates.20220101.1335.gz', - 'updates.20220101.1340.gz', - 'updates.20220101.1345.gz', - 'updates.20220101.1350.gz', - 'updates.20220101.1355.gz', - 
'updates.20220101.1400.gz', - 'updates.20220101.1405.gz', - 'updates.20220101.1410.gz', - 'updates.20220101.1415.gz', - 'updates.20220101.1420.gz', - 'updates.20220101.1425.gz', - 'updates.20220101.1430.gz', - 'updates.20220101.1435.gz', - 'updates.20220101.1440.gz', - 'updates.20220101.1445.gz', - 'updates.20220101.1450.gz', - 'updates.20220101.1455.gz', - 'updates.20220101.1500.gz', - 'updates.20220101.1505.gz', - 'updates.20220101.1510.gz', - 'updates.20220101.1515.gz', - 'updates.20220101.1520.gz', - 'updates.20220101.1525.gz', - 'updates.20220101.1530.gz', - 'updates.20220101.1535.gz', - 'updates.20220101.1540.gz', - 'updates.20220101.1545.gz', - 'updates.20220101.1550.gz', - 'updates.20220101.1555.gz', - 'updates.20220101.1600.gz', - 'updates.20220101.1605.gz', - 'updates.20220101.1610.gz', - 'updates.20220101.1615.gz', - 'updates.20220101.1620.gz', - 'updates.20220101.1625.gz', - 'updates.20220101.1630.gz', - 'updates.20220101.1635.gz', - 'updates.20220101.1640.gz', - 'updates.20220101.1645.gz', - 'updates.20220101.1650.gz', - 'updates.20220101.1655.gz', - 'updates.20220101.1700.gz', - 'updates.20220101.1705.gz', - 'updates.20220101.1710.gz', - 'updates.20220101.1715.gz', - 'updates.20220101.1720.gz', - 'updates.20220101.1725.gz', - 'updates.20220101.1730.gz', - 'updates.20220101.1735.gz', - 'updates.20220101.1740.gz', - 'updates.20220101.1745.gz', - 'updates.20220101.1750.gz', - 'updates.20220101.1755.gz', - 'updates.20220101.1800.gz', - 'updates.20220101.1805.gz', - 'updates.20220101.1810.gz', - 'updates.20220101.1815.gz', - 'updates.20220101.1820.gz', - 'updates.20220101.1825.gz', - 'updates.20220101.1830.gz', - 'updates.20220101.1835.gz', - 'updates.20220101.1840.gz', - 'updates.20220101.1845.gz', - 'updates.20220101.1850.gz', - 'updates.20220101.1855.gz', - 'updates.20220101.1900.gz', - 'updates.20220101.1905.gz', - 'updates.20220101.1910.gz', - 'updates.20220101.1915.gz', - 'updates.20220101.1920.gz', - 'updates.20220101.1925.gz', - 'updates.20220101.1930.gz', - 'updates.20220101.1935.gz', - 'updates.20220101.1940.gz', - 'updates.20220101.1945.gz', - 'updates.20220101.1950.gz', - 'updates.20220101.1955.gz', - 'updates.20220101.2000.gz', - 'updates.20220101.2005.gz', - 'updates.20220101.2010.gz', - 'updates.20220101.2015.gz', - 'updates.20220101.2020.gz', - 'updates.20220101.2025.gz', - 'updates.20220101.2030.gz', - 'updates.20220101.2035.gz', - 'updates.20220101.2040.gz', - 'updates.20220101.2045.gz', - 'updates.20220101.2050.gz', - 'updates.20220101.2055.gz', - 'updates.20220101.2100.gz', - 'updates.20220101.2105.gz', - 'updates.20220101.2110.gz', - 'updates.20220101.2115.gz', - 'updates.20220101.2120.gz', - 'updates.20220101.2125.gz', - 'updates.20220101.2130.gz', - 'updates.20220101.2135.gz', - 'updates.20220101.2140.gz', - 'updates.20220101.2145.gz', - 'updates.20220101.2150.gz', - 'updates.20220101.2155.gz', - 'updates.20220101.2200.gz', - 'updates.20220101.2205.gz', - 'updates.20220101.2210.gz', - 'updates.20220101.2215.gz', - 'updates.20220101.2220.gz', - 'updates.20220101.2225.gz', - 'updates.20220101.2230.gz', - 'updates.20220101.2235.gz', - 'updates.20220101.2240.gz', - 'updates.20220101.2245.gz', - 'updates.20220101.2250.gz', - 'updates.20220101.2255.gz', - 'updates.20220101.2300.gz', - 'updates.20220101.2305.gz', - 'updates.20220101.2310.gz', - 'updates.20220101.2315.gz', - 'updates.20220101.2320.gz', - 'updates.20220101.2325.gz', - 'updates.20220101.2330.gz', - 'updates.20220101.2335.gz', - 'updates.20220101.2340.gz', - 'updates.20220101.2345.gz', - 
'updates.20220101.2350.gz', - 'updates.20220101.2355.gz', + "updates.20220101.0000.gz", + "updates.20220101.0005.gz", + "updates.20220101.0010.gz", + "updates.20220101.0015.gz", + "updates.20220101.0020.gz", + "updates.20220101.0025.gz", + "updates.20220101.0030.gz", + "updates.20220101.0035.gz", + "updates.20220101.0040.gz", + "updates.20220101.0045.gz", + "updates.20220101.0050.gz", + "updates.20220101.0055.gz", + "updates.20220101.0100.gz", + "updates.20220101.0105.gz", + "updates.20220101.0110.gz", + "updates.20220101.0115.gz", + "updates.20220101.0120.gz", + "updates.20220101.0125.gz", + "updates.20220101.0130.gz", + "updates.20220101.0135.gz", + "updates.20220101.0140.gz", + "updates.20220101.0145.gz", + "updates.20220101.0150.gz", + "updates.20220101.0155.gz", + "updates.20220101.0200.gz", + "updates.20220101.0205.gz", + "updates.20220101.0210.gz", + "updates.20220101.0215.gz", + "updates.20220101.0220.gz", + "updates.20220101.0225.gz", + "updates.20220101.0230.gz", + "updates.20220101.0235.gz", + "updates.20220101.0240.gz", + "updates.20220101.0245.gz", + "updates.20220101.0250.gz", + "updates.20220101.0255.gz", + "updates.20220101.0300.gz", + "updates.20220101.0305.gz", + "updates.20220101.0310.gz", + "updates.20220101.0315.gz", + "updates.20220101.0320.gz", + "updates.20220101.0325.gz", + "updates.20220101.0330.gz", + "updates.20220101.0335.gz", + "updates.20220101.0340.gz", + "updates.20220101.0345.gz", + "updates.20220101.0350.gz", + "updates.20220101.0355.gz", + "updates.20220101.0400.gz", + "updates.20220101.0405.gz", + "updates.20220101.0410.gz", + "updates.20220101.0415.gz", + "updates.20220101.0420.gz", + "updates.20220101.0425.gz", + "updates.20220101.0430.gz", + "updates.20220101.0435.gz", + "updates.20220101.0440.gz", + "updates.20220101.0445.gz", + "updates.20220101.0450.gz", + "updates.20220101.0455.gz", + "updates.20220101.0500.gz", + "updates.20220101.0505.gz", + "updates.20220101.0510.gz", + "updates.20220101.0515.gz", + "updates.20220101.0520.gz", + "updates.20220101.0525.gz", + "updates.20220101.0530.gz", + "updates.20220101.0535.gz", + "updates.20220101.0540.gz", + "updates.20220101.0545.gz", + "updates.20220101.0550.gz", + "updates.20220101.0555.gz", + "updates.20220101.0600.gz", + "updates.20220101.0605.gz", + "updates.20220101.0610.gz", + "updates.20220101.0615.gz", + "updates.20220101.0620.gz", + "updates.20220101.0625.gz", + "updates.20220101.0630.gz", + "updates.20220101.0635.gz", + "updates.20220101.0640.gz", + "updates.20220101.0645.gz", + "updates.20220101.0650.gz", + "updates.20220101.0655.gz", + "updates.20220101.0700.gz", + "updates.20220101.0705.gz", + "updates.20220101.0710.gz", + "updates.20220101.0715.gz", + "updates.20220101.0720.gz", + "updates.20220101.0725.gz", + "updates.20220101.0730.gz", + "updates.20220101.0735.gz", + "updates.20220101.0740.gz", + "updates.20220101.0745.gz", + "updates.20220101.0750.gz", + "updates.20220101.0755.gz", + "updates.20220101.0800.gz", + "updates.20220101.0805.gz", + "updates.20220101.0810.gz", + "updates.20220101.0815.gz", + "updates.20220101.0820.gz", + "updates.20220101.0825.gz", + "updates.20220101.0830.gz", + "updates.20220101.0835.gz", + "updates.20220101.0840.gz", + "updates.20220101.0845.gz", + "updates.20220101.0850.gz", + "updates.20220101.0855.gz", + "updates.20220101.0900.gz", + "updates.20220101.0905.gz", + "updates.20220101.0910.gz", + "updates.20220101.0915.gz", + "updates.20220101.0920.gz", + "updates.20220101.0925.gz", + "updates.20220101.0930.gz", + "updates.20220101.0935.gz", + 
"updates.20220101.0940.gz", + "updates.20220101.0945.gz", + "updates.20220101.0950.gz", + "updates.20220101.0955.gz", + "updates.20220101.1000.gz", + "updates.20220101.1005.gz", + "updates.20220101.1010.gz", + "updates.20220101.1015.gz", + "updates.20220101.1020.gz", + "updates.20220101.1025.gz", + "updates.20220101.1030.gz", + "updates.20220101.1035.gz", + "updates.20220101.1040.gz", + "updates.20220101.1045.gz", + "updates.20220101.1050.gz", + "updates.20220101.1055.gz", + "updates.20220101.1100.gz", + "updates.20220101.1105.gz", + "updates.20220101.1110.gz", + "updates.20220101.1115.gz", + "updates.20220101.1120.gz", + "updates.20220101.1125.gz", + "updates.20220101.1130.gz", + "updates.20220101.1135.gz", + "updates.20220101.1140.gz", + "updates.20220101.1145.gz", + "updates.20220101.1150.gz", + "updates.20220101.1155.gz", + "updates.20220101.1200.gz", + "updates.20220101.1205.gz", + "updates.20220101.1210.gz", + "updates.20220101.1215.gz", + "updates.20220101.1220.gz", + "updates.20220101.1225.gz", + "updates.20220101.1230.gz", + "updates.20220101.1235.gz", + "updates.20220101.1240.gz", + "updates.20220101.1245.gz", + "updates.20220101.1250.gz", + "updates.20220101.1255.gz", + "updates.20220101.1300.gz", + "updates.20220101.1305.gz", + "updates.20220101.1310.gz", + "updates.20220101.1315.gz", + "updates.20220101.1320.gz", + "updates.20220101.1325.gz", + "updates.20220101.1330.gz", + "updates.20220101.1335.gz", + "updates.20220101.1340.gz", + "updates.20220101.1345.gz", + "updates.20220101.1350.gz", + "updates.20220101.1355.gz", + "updates.20220101.1400.gz", + "updates.20220101.1405.gz", + "updates.20220101.1410.gz", + "updates.20220101.1415.gz", + "updates.20220101.1420.gz", + "updates.20220101.1425.gz", + "updates.20220101.1430.gz", + "updates.20220101.1435.gz", + "updates.20220101.1440.gz", + "updates.20220101.1445.gz", + "updates.20220101.1450.gz", + "updates.20220101.1455.gz", + "updates.20220101.1500.gz", + "updates.20220101.1505.gz", + "updates.20220101.1510.gz", + "updates.20220101.1515.gz", + "updates.20220101.1520.gz", + "updates.20220101.1525.gz", + "updates.20220101.1530.gz", + "updates.20220101.1535.gz", + "updates.20220101.1540.gz", + "updates.20220101.1545.gz", + "updates.20220101.1550.gz", + "updates.20220101.1555.gz", + "updates.20220101.1600.gz", + "updates.20220101.1605.gz", + "updates.20220101.1610.gz", + "updates.20220101.1615.gz", + "updates.20220101.1620.gz", + "updates.20220101.1625.gz", + "updates.20220101.1630.gz", + "updates.20220101.1635.gz", + "updates.20220101.1640.gz", + "updates.20220101.1645.gz", + "updates.20220101.1650.gz", + "updates.20220101.1655.gz", + "updates.20220101.1700.gz", + "updates.20220101.1705.gz", + "updates.20220101.1710.gz", + "updates.20220101.1715.gz", + "updates.20220101.1720.gz", + "updates.20220101.1725.gz", + "updates.20220101.1730.gz", + "updates.20220101.1735.gz", + "updates.20220101.1740.gz", + "updates.20220101.1745.gz", + "updates.20220101.1750.gz", + "updates.20220101.1755.gz", + "updates.20220101.1800.gz", + "updates.20220101.1805.gz", + "updates.20220101.1810.gz", + "updates.20220101.1815.gz", + "updates.20220101.1820.gz", + "updates.20220101.1825.gz", + "updates.20220101.1830.gz", + "updates.20220101.1835.gz", + "updates.20220101.1840.gz", + "updates.20220101.1845.gz", + "updates.20220101.1850.gz", + "updates.20220101.1855.gz", + "updates.20220101.1900.gz", + "updates.20220101.1905.gz", + "updates.20220101.1910.gz", + "updates.20220101.1915.gz", + "updates.20220101.1920.gz", + "updates.20220101.1925.gz", + 
"updates.20220101.1930.gz", + "updates.20220101.1935.gz", + "updates.20220101.1940.gz", + "updates.20220101.1945.gz", + "updates.20220101.1950.gz", + "updates.20220101.1955.gz", + "updates.20220101.2000.gz", + "updates.20220101.2005.gz", + "updates.20220101.2010.gz", + "updates.20220101.2015.gz", + "updates.20220101.2020.gz", + "updates.20220101.2025.gz", + "updates.20220101.2030.gz", + "updates.20220101.2035.gz", + "updates.20220101.2040.gz", + "updates.20220101.2045.gz", + "updates.20220101.2050.gz", + "updates.20220101.2055.gz", + "updates.20220101.2100.gz", + "updates.20220101.2105.gz", + "updates.20220101.2110.gz", + "updates.20220101.2115.gz", + "updates.20220101.2120.gz", + "updates.20220101.2125.gz", + "updates.20220101.2130.gz", + "updates.20220101.2135.gz", + "updates.20220101.2140.gz", + "updates.20220101.2145.gz", + "updates.20220101.2150.gz", + "updates.20220101.2155.gz", + "updates.20220101.2200.gz", + "updates.20220101.2205.gz", + "updates.20220101.2210.gz", + "updates.20220101.2215.gz", + "updates.20220101.2220.gz", + "updates.20220101.2225.gz", + "updates.20220101.2230.gz", + "updates.20220101.2235.gz", + "updates.20220101.2240.gz", + "updates.20220101.2245.gz", + "updates.20220101.2250.gz", + "updates.20220101.2255.gz", + "updates.20220101.2300.gz", + "updates.20220101.2305.gz", + "updates.20220101.2310.gz", + "updates.20220101.2315.gz", + "updates.20220101.2320.gz", + "updates.20220101.2325.gz", + "updates.20220101.2330.gz", + "updates.20220101.2335.gz", + "updates.20220101.2340.gz", + "updates.20220101.2345.gz", + "updates.20220101.2350.gz", + "updates.20220101.2355.gz", ] rv_20220101 = [ - 'updates.20220101.0000.bz2', - 'updates.20220101.0015.bz2', - 'updates.20220101.0030.bz2', - 'updates.20220101.0045.bz2', - 'updates.20220101.0100.bz2', - 'updates.20220101.0115.bz2', - 'updates.20220101.0130.bz2', - 'updates.20220101.0145.bz2', - 'updates.20220101.0200.bz2', - 'updates.20220101.0215.bz2', - 'updates.20220101.0230.bz2', - 'updates.20220101.0245.bz2', - 'updates.20220101.0300.bz2', - 'updates.20220101.0315.bz2', - 'updates.20220101.0330.bz2', - 'updates.20220101.0345.bz2', - 'updates.20220101.0400.bz2', - 'updates.20220101.0415.bz2', - 'updates.20220101.0430.bz2', - 'updates.20220101.0445.bz2', - 'updates.20220101.0500.bz2', - 'updates.20220101.0515.bz2', - 'updates.20220101.0530.bz2', - 'updates.20220101.0545.bz2', - 'updates.20220101.0600.bz2', - 'updates.20220101.0615.bz2', - 'updates.20220101.0630.bz2', - 'updates.20220101.0645.bz2', - 'updates.20220101.0700.bz2', - 'updates.20220101.0715.bz2', - 'updates.20220101.0730.bz2', - 'updates.20220101.0745.bz2', - 'updates.20220101.0800.bz2', - 'updates.20220101.0815.bz2', - 'updates.20220101.0830.bz2', - 'updates.20220101.0845.bz2', - 'updates.20220101.0900.bz2', - 'updates.20220101.0915.bz2', - 'updates.20220101.0930.bz2', - 'updates.20220101.0945.bz2', - 'updates.20220101.1000.bz2', - 'updates.20220101.1015.bz2', - 'updates.20220101.1030.bz2', - 'updates.20220101.1045.bz2', - 'updates.20220101.1100.bz2', - 'updates.20220101.1115.bz2', - 'updates.20220101.1130.bz2', - 'updates.20220101.1145.bz2', - 'updates.20220101.1200.bz2', - 'updates.20220101.1215.bz2', - 'updates.20220101.1230.bz2', - 'updates.20220101.1245.bz2', - 'updates.20220101.1300.bz2', - 'updates.20220101.1315.bz2', - 'updates.20220101.1330.bz2', - 'updates.20220101.1345.bz2', - 'updates.20220101.1400.bz2', - 'updates.20220101.1415.bz2', - 'updates.20220101.1430.bz2', - 'updates.20220101.1445.bz2', - 'updates.20220101.1500.bz2', - 
'updates.20220101.1515.bz2', - 'updates.20220101.1530.bz2', - 'updates.20220101.1545.bz2', - 'updates.20220101.1600.bz2', - 'updates.20220101.1615.bz2', - 'updates.20220101.1630.bz2', - 'updates.20220101.1645.bz2', - 'updates.20220101.1700.bz2', - 'updates.20220101.1715.bz2', - 'updates.20220101.1730.bz2', - 'updates.20220101.1745.bz2', - 'updates.20220101.1800.bz2', - 'updates.20220101.1815.bz2', - 'updates.20220101.1830.bz2', - 'updates.20220101.1845.bz2', - 'updates.20220101.1900.bz2', - 'updates.20220101.1915.bz2', - 'updates.20220101.1930.bz2', - 'updates.20220101.1945.bz2', - 'updates.20220101.2000.bz2', - 'updates.20220101.2015.bz2', - 'updates.20220101.2030.bz2', - 'updates.20220101.2045.bz2', - 'updates.20220101.2100.bz2', - 'updates.20220101.2115.bz2', - 'updates.20220101.2130.bz2', - 'updates.20220101.2145.bz2', - 'updates.20220101.2200.bz2', - 'updates.20220101.2215.bz2', - 'updates.20220101.2230.bz2', - 'updates.20220101.2245.bz2', - 'updates.20220101.2300.bz2', - 'updates.20220101.2315.bz2', - 'updates.20220101.2330.bz2', - 'updates.20220101.2345.bz2', + "updates.20220101.0000.bz2", + "updates.20220101.0015.bz2", + "updates.20220101.0030.bz2", + "updates.20220101.0045.bz2", + "updates.20220101.0100.bz2", + "updates.20220101.0115.bz2", + "updates.20220101.0130.bz2", + "updates.20220101.0145.bz2", + "updates.20220101.0200.bz2", + "updates.20220101.0215.bz2", + "updates.20220101.0230.bz2", + "updates.20220101.0245.bz2", + "updates.20220101.0300.bz2", + "updates.20220101.0315.bz2", + "updates.20220101.0330.bz2", + "updates.20220101.0345.bz2", + "updates.20220101.0400.bz2", + "updates.20220101.0415.bz2", + "updates.20220101.0430.bz2", + "updates.20220101.0445.bz2", + "updates.20220101.0500.bz2", + "updates.20220101.0515.bz2", + "updates.20220101.0530.bz2", + "updates.20220101.0545.bz2", + "updates.20220101.0600.bz2", + "updates.20220101.0615.bz2", + "updates.20220101.0630.bz2", + "updates.20220101.0645.bz2", + "updates.20220101.0700.bz2", + "updates.20220101.0715.bz2", + "updates.20220101.0730.bz2", + "updates.20220101.0745.bz2", + "updates.20220101.0800.bz2", + "updates.20220101.0815.bz2", + "updates.20220101.0830.bz2", + "updates.20220101.0845.bz2", + "updates.20220101.0900.bz2", + "updates.20220101.0915.bz2", + "updates.20220101.0930.bz2", + "updates.20220101.0945.bz2", + "updates.20220101.1000.bz2", + "updates.20220101.1015.bz2", + "updates.20220101.1030.bz2", + "updates.20220101.1045.bz2", + "updates.20220101.1100.bz2", + "updates.20220101.1115.bz2", + "updates.20220101.1130.bz2", + "updates.20220101.1145.bz2", + "updates.20220101.1200.bz2", + "updates.20220101.1215.bz2", + "updates.20220101.1230.bz2", + "updates.20220101.1245.bz2", + "updates.20220101.1300.bz2", + "updates.20220101.1315.bz2", + "updates.20220101.1330.bz2", + "updates.20220101.1345.bz2", + "updates.20220101.1400.bz2", + "updates.20220101.1415.bz2", + "updates.20220101.1430.bz2", + "updates.20220101.1445.bz2", + "updates.20220101.1500.bz2", + "updates.20220101.1515.bz2", + "updates.20220101.1530.bz2", + "updates.20220101.1545.bz2", + "updates.20220101.1600.bz2", + "updates.20220101.1615.bz2", + "updates.20220101.1630.bz2", + "updates.20220101.1645.bz2", + "updates.20220101.1700.bz2", + "updates.20220101.1715.bz2", + "updates.20220101.1730.bz2", + "updates.20220101.1745.bz2", + "updates.20220101.1800.bz2", + "updates.20220101.1815.bz2", + "updates.20220101.1830.bz2", + "updates.20220101.1845.bz2", + "updates.20220101.1900.bz2", + "updates.20220101.1915.bz2", + "updates.20220101.1930.bz2", + 
"updates.20220101.1945.bz2", + "updates.20220101.2000.bz2", + "updates.20220101.2015.bz2", + "updates.20220101.2030.bz2", + "updates.20220101.2045.bz2", + "updates.20220101.2100.bz2", + "updates.20220101.2115.bz2", + "updates.20220101.2130.bz2", + "updates.20220101.2145.bz2", + "updates.20220101.2200.bz2", + "updates.20220101.2215.bz2", + "updates.20220101.2230.bz2", + "updates.20220101.2245.bz2", + "updates.20220101.2300.bz2", + "updates.20220101.2315.bz2", + "updates.20220101.2330.bz2", + "updates.20220101.2345.bz2", ] self.assertEqual( self.mrt_as57355.gen_upd_fns_day("20220101"), as57355_20220101 @@ -1472,20 +1517,20 @@ def test_gen_upd_fns_day(self): ) self.assertEqual(self.mrt_rv.gen_upd_fns_day("20220101"), rv_20220101) - def test_gen_upd_fns_range(self): + def test_gen_upd_fns_range(self: "test_mrt_archive") -> None: as57355_20220101 = [ - '20220101.2350.dump', - '20220102.0000.dump', - '20220102.0010.dump', + "20220101.2350.dump", + "20220102.0000.dump", + "20220102.0010.dump", ] ripe_20220101 = [ - 'updates.20220101.2350.gz', - 'updates.20220101.2355.gz', - 'updates.20220102.0000.gz', - 'updates.20220102.0005.gz', - 'updates.20220102.0010.gz', + "updates.20220101.2350.gz", + "updates.20220101.2355.gz", + "updates.20220102.0000.gz", + "updates.20220102.0005.gz", + "updates.20220102.0010.gz", ] - rv_20220101 = ['updates.20220102.0000.bz2'] + rv_20220101 = ["updates.20220102.0000.bz2"] self.assertEqual( self.mrt_as57355.gen_upd_fns_range( start_date="20220101.2350", end_date="20220102.0010" @@ -1505,7 +1550,7 @@ def test_gen_upd_fns_range(self): rv_20220101, ) - def test_gen_upd_key(self): + def test_gen_upd_key(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.gen_upd_key("20220101"), "UNIT_TEST_AS57355_UPD:20220101", @@ -1521,7 +1566,7 @@ def test_gen_upd_key(self): self.assertRaises(ValueError, self.mrt_rv.gen_upd_key, "") self.assertRaises(TypeError, self.mrt_rv.gen_upd_key, 123) - def test_gen_upd_url(self): + def test_gen_upd_url(self: "test_mrt_archive") -> None: """ Check that the wrapper function calls the correct child function, and that an error is thrown if there is no matching child function: @@ -1542,17 +1587,17 @@ def test_gen_upd_url(self): ) self.mrt_rv.TYPE = "HcSHqWb3C9i2jZqnzVj1" - self.assertRaises(ValueError, self.mrt_rv.gen_upd_url) + self.assertRaises(ValueError, self.mrt_rv.gen_upd_url, "") self.mrt_rv.TYPE = self.RV_TYPE - def test_gen_upd_url_as57355(self): + def test_gen_upd_url_as57355(self: "test_mrt_archive") -> None: as57355_url = "http://172.17.0.1:8000/lukasz/update/20220101.0000.dump" self.assertEqual( self.mrt_as57355.gen_upd_url_as57355("20220101.0000.dump"), as57355_url, ) - def test_gen_upd_url_ripe(self): + def test_gen_upd_url_ripe(self: "test_mrt_archive") -> None: ripe_url = ( "https://data.ris.ripe.net/rrc23/2022.01/updates.20220101.0000.gz" ) @@ -1561,7 +1606,7 @@ def test_gen_upd_url_ripe(self): ripe_url, ) - def test_gen_upd_url_rv(self): + def test_gen_upd_url_rv(self: "test_mrt_archive") -> None: rv_url = ( "http://archive.routeviews.org/route-views.linx/bgpdata/2022.01/" "UPDATES/updates.20220101.0000.bz2" @@ -1570,11 +1615,11 @@ def test_gen_upd_url_rv(self): self.mrt_rv.gen_upd_url_rv("updates.20220101.0000.bz2"), rv_url ) - def test_gen_upd_url_range(self): + def test_gen_upd_url_range(self: "test_mrt_archive") -> None: as57355_urls = [ - 'http://172.17.0.1:8000/lukasz/update/20220101.2350.dump', - 'http://172.17.0.1:8000/lukasz/update/20220102.0000.dump', - 
'http://172.17.0.1:8000/lukasz/update/20220102.0010.dump', + "http://172.17.0.1:8000/lukasz/update/20220101.2350.dump", + "http://172.17.0.1:8000/lukasz/update/20220102.0000.dump", + "http://172.17.0.1:8000/lukasz/update/20220102.0010.dump", ] self.assertEqual( self.mrt_as57355.gen_upd_url_range( @@ -1583,11 +1628,11 @@ def test_gen_upd_url_range(self): as57355_urls, ) ripe_urls = [ - 'https://data.ris.ripe.net/rrc23/2022.01/updates.20220101.2350.gz', - 'https://data.ris.ripe.net/rrc23/2022.01/updates.20220101.2355.gz', - 'https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0000.gz', - 'https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0005.gz', - 'https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0010.gz', + "https://data.ris.ripe.net/rrc23/2022.01/updates.20220101.2350.gz", + "https://data.ris.ripe.net/rrc23/2022.01/updates.20220101.2355.gz", + "https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0000.gz", + "https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0005.gz", + "https://data.ris.ripe.net/rrc23/2022.01/updates.20220102.0010.gz", ] self.assertEqual( self.mrt_ripe.gen_upd_url_range( @@ -1616,7 +1661,7 @@ def test_gen_upd_url_range(self): rv_urls, ) - def test_ts_from_filename(self): + def test_ts_from_filename(self: "test_mrt_archive") -> None: """ Check that the wrapper function calls the correct child function, and that an error is thrown if there is no matching child function: @@ -1656,7 +1701,7 @@ def test_ts_from_filename(self): ) self.mrt_rv.TYPE = self.RV_TYPE - def test_ts_from_filename_as57355(self): + def test_ts_from_filename_as57355(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.ts_from_filename_as57355( self.mrt_as57355.gen_upd_fn_date("20220101.0000") @@ -1664,7 +1709,7 @@ def test_ts_from_filename_as57355(self): datetime.datetime(2022, 1, 1, 0, 0), ) - def test_ts_from_filename_ripe(self): + def test_ts_from_filename_ripe(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_ripe.ts_from_filename_ripe( self.mrt_ripe.gen_upd_fn_date("20220101.0000") @@ -1672,7 +1717,7 @@ def test_ts_from_filename_ripe(self): datetime.datetime(2022, 1, 1, 0, 0), ) - def test_ts_from_filename_rv(self): + def test_ts_from_filename_rv(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_rv.ts_from_filename_rv( self.mrt_rv.gen_upd_fn_date("20220101.0000") @@ -1680,7 +1725,7 @@ def test_ts_from_filename_rv(self): datetime.datetime(2022, 1, 1, 0, 0), ) - def test_valid_ym(self): + def test_valid_ym(self: "test_mrt_archive") -> None: # Missing value self.assertRaises(ValueError, self.mrt_rv.valid_ym, "") # Not a string @@ -1703,7 +1748,7 @@ def test_valid_ym(self): asserted = True self.assertEqual(asserted, False) - def test_valid_ymd(self): + def test_valid_ymd(self: "test_mrt_archive") -> None: # Missing value self.assertRaises(ValueError, self.mrt_rv.valid_ymd, "") # Not a string @@ -1728,7 +1773,7 @@ def test_valid_ymd(self): asserted = True self.assertEqual(asserted, False) - def test_valid_ymd_hm(self): + def test_valid_ymd_hm(self: "test_mrt_archive") -> None: # Missing value self.assertRaises(ValueError, self.mrt_rv.valid_ymd_hm, "") # Not a string @@ -1769,7 +1814,7 @@ def test_valid_ymd_hm(self): asserted = True self.assertEqual(asserted, False) - def test_ymd_from_file_path(self): + def test_ymd_from_file_path(self: "test_mrt_archive") -> None: self.assertEqual( self.mrt_as57355.ymd_from_file_path("20220101.0000.dump"), "20220101", @@ -1800,5 +1845,5 @@ def test_ymd_from_file_path(self): self.mrt_rv.TYPE = 
self.RV_TYPE -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_mrt_archives.py b/dnas/tests/test_mrt_archives.py index 55c5334..cac3a7f 100644 --- a/dnas/tests/test_mrt_archives.py +++ b/dnas/tests/test_mrt_archives.py @@ -1,13 +1,11 @@ import datetime import os import sys +import typing import unittest sys.path.append( - os.path.join( - os.path.dirname(os.path.realpath(__file__)) - , "../" - ) + os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.config import config as cfg @@ -15,29 +13,26 @@ from dnas.mrt_archives import mrt_archives -class test_mrt_archives (unittest.TestCase): - - - def setUp(self): +class test_mrt_archives(unittest.TestCase): + def setUp(self: "test_mrt_archives") -> None: self.mrt_a = mrt_archives() self.cfg = cfg() - def test_init(self): - + def test_init(self: "test_mrt_archives") -> None: self.assertIsInstance(self.mrt_a, mrt_archives) self.assertIsInstance(self.mrt_a.archives, list) self.assertTrue(len(self.mrt_a.archives) > 0) for entry in self.mrt_a.archives: self.assertIsInstance(entry, mrt_archive) - def test_arch_from_file_path(self): - self.assertRaises(ValueError, self.mrt_a.arch_from_file_path) + def test_arch_from_file_path(self: "test_mrt_archives") -> None: + self.assertRaises(ValueError, self.mrt_a.arch_from_file_path, "") self.assertRaises(TypeError, self.mrt_a.arch_from_file_path, 123) arch = self.mrt_a.archives[0] filename = arch.gen_latest_upd_fn() filepath = os.path.normpath(arch.MRT_DIR + "/" + filename) - + ret = self.mrt_a.arch_from_file_path(filepath) self.assertIsInstance(ret, mrt_archive) self.assertEqual(ret, arch) @@ -46,8 +41,8 @@ def test_arch_from_file_path(self): self.assertIsInstance(ret, bool) self.assertEqual(ret, False) - def test_arch_from_url(self): - self.assertRaises(ValueError, self.mrt_a.arch_from_url) + def test_arch_from_url(self: "test_mrt_archives") -> None: + self.assertRaises(ValueError, self.mrt_a.arch_from_url, "") self.assertRaises(TypeError, self.mrt_a.arch_from_url, 123) arch = self.mrt_a.archives[0] @@ -62,9 +57,9 @@ def test_arch_from_url(self): self.assertIsInstance(ret, bool) self.assertEqual(ret, False) - def test_get_arch_option(self): - self.assertRaises(ValueError, self.mrt_a.get_arch_option) - self.assertRaises(ValueError, self.mrt_a.get_arch_option, "abc") + def test_get_arch_option(self: "test_mrt_archives") -> None: + self.assertRaises(ValueError, self.mrt_a.get_arch_option, "", "") + self.assertRaises(ValueError, self.mrt_a.get_arch_option, "abc", "") self.assertRaises(TypeError, self.mrt_a.get_arch_option, "abc", 123) self.assertRaises( @@ -83,8 +78,8 @@ def test_get_arch_option(self): AttributeError, self.mrt_a.get_arch_option, filepath, "hwiwewohh7" ) - def test_get_day_key(self): - self.assertRaises(ValueError, self.mrt_a.get_day_key) + def test_get_day_key(self: "test_mrt_archives") -> None: + self.assertRaises(ValueError, self.mrt_a.get_day_key, "") self.assertRaises(TypeError, self.mrt_a.get_day_key, 123) self.assertRaises(ValueError, self.mrt_a.get_day_key, "/tmp/03oeiisks") @@ -100,8 +95,8 @@ def test_get_day_key(self): ) self.assertEqual(ret, arch.UPD_KEY + ":" + ymd) - def test_is_rib_from_filename(self): - self.assertRaises(ValueError, self.mrt_a.is_rib_from_filename) + def test_is_rib_from_filename(self: "test_mrt_archives") -> None: + self.assertRaises(ValueError, self.mrt_a.is_rib_from_filename, "") self.assertRaises(TypeError, self.mrt_a.is_rib_from_filename, 123) arch = self.mrt_a.archives[0] filename = 
arch.gen_latest_rib_fn()
@@ -113,5 +108,6 @@ def test_is_rib_from_filename(self):
         self.assertIsInstance(ret, bool)
         self.assertEqual(ret, False)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/dnas/tests/test_mrt_entry.py b/dnas/tests/test_mrt_entry.py
index ad2338a..6a8d44f 100644
--- a/dnas/tests/test_mrt_entry.py
+++ b/dnas/tests/test_mrt_entry.py
@@ -3,6 +3,7 @@
 import re
 import shutil
 import sys
+import typing
 import unittest
 
 sys.path.append(
@@ -14,7 +15,7 @@
 
 
 class test_mrt_entry(unittest.TestCase):
-    def setUp(self):
+    def setUp(self: "test_mrt_entry") -> None:
         self.mrt_e = mrt_entry()
 
         """
@@ -50,7 +51,7 @@ def setUp(self):
         shutil.copy2(self.upd_1_path, self.upd_1_mrt)
         self.mrt_s = mrt_parser.parse_upd_dump(self.upd_1_mrt)
 
-    def test_init(self):
+    def test_init(self: "test_mrt_entry") -> None:
         self.assertIsInstance(self.mrt_e, mrt_entry)
         self.assertIsInstance(self.mrt_e.advt, int)
         self.assertIsInstance(self.mrt_e.as_path, list)
@@ -59,40 +60,40 @@
         self.assertEqual(len(self.mrt_e.comm_set), 0)
         self.assertIsInstance(self.mrt_e.med, int)
         self.assertEqual(self.mrt_e.med, -1)
-        self.assertEqual(self.mrt_e.next_hop, None)
-        self.assertEqual(self.mrt_e.prefix, None)
+        self.assertEqual(self.mrt_e.next_hop, "")
+        self.assertEqual(self.mrt_e.prefix, "")
         self.assertIsInstance(self.mrt_e.origin_asns, set)
         self.assertEqual(len(self.mrt_e.origin_asns), 0)
-        self.assertEqual(self.mrt_e.peer_asn, None)
-        self.assertEqual(self.mrt_e.timestamp, None)
+        self.assertEqual(self.mrt_e.peer_asn, "")
+        self.assertEqual(self.mrt_e.timestamp, "")
         self.assertIsInstance(self.mrt_e.updates, int)
         self.assertIsInstance(self.mrt_e.withdraws, int)
         self.assertIsInstance(self.mrt_e.unknown_attrs, set)
         self.assertEqual(len(self.mrt_e.unknown_attrs), 0)
 
-    def test_equal_to(self):
+    def test_equal_to(self: "test_mrt_entry") -> None:
         e1 = copy.deepcopy(self.mrt_s.longest_as_path[0])
         e2 = copy.deepcopy(self.mrt_s.longest_as_path[0])
         self.assertIsInstance(e1, mrt_entry)
         self.assertIsInstance(e2, mrt_entry)
 
-        self.assertRaises(ValueError, e1.equal_to)
-        self.assertRaises(TypeError, e1.equal_to, 123)
+        self.assertRaises(ValueError, e1.equal_to, "")
+        self.assertRaises(ValueError, e1.equal_to, "", 123)
         self.assertTrue(e1.equal_to(e2))
 
-    def test_from_json(self):
+    def test_from_json(self: "test_mrt_entry") -> None:
         with open(self.entry_1_path) as f:
             json_data = f.read()
         self.assertIsInstance(json_data, str)
 
         e = mrt_entry()
-        self.assertRaises(ValueError, e.from_json)
+        self.assertRaises(ValueError, e.from_json, "")
         self.assertRaises(TypeError, e.from_json, 123)
 
         e.from_json(json_data)
         self.assertTrue(e.equal_to(self.mrt_s.longest_as_path[0], True))
 
-    def test_gen_timestamp(self):
+    def test_gen_timestamp(self: "test_mrt_entry") -> None:
         ret = mrt_entry.gen_timestamp()
         self.assertIsInstance(ret, str)
         self.assertTrue(
@@ -105,7 +106,7 @@
             )
         )
 
-    def test_to_json(self):
+    def test_to_json(self: "test_mrt_entry") -> None:
         with open(self.entry_1_path) as f:
             j1 = f.read()
         self.assertIsInstance(j1, str)
@@ -114,9 +115,9 @@
         self.assertIsInstance(j2, str)
         self.assertEqual(j1, j2)
 
-    def tearDown(self):
+    def tearDown(self: "test_mrt_entry") -> None:
         os.remove(self.upd_1_mrt)
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
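Aside on the two behaviours the test_mrt_entry changes above pin down: string-typed fields now default to "" rather than None (so they can be annotated as plain str under mypy), and to_json()/from_json() must round-trip an entry such that equal_to() reports equality. A minimal self-contained sketch of that contract follows; entry_sketch and its method bodies are illustrative stand-ins, not the real dnas.mrt_entry code, and only the field/method names and the example values are taken from the diff.

import json


class entry_sketch:
    """Illustrative stand-in for dnas.mrt_entry, not the real class."""

    def __init__(self) -> None:
        # Defaulting to "" instead of None keeps these attributes plain
        # str, which is what the updated test_init assertions check.
        self.next_hop: str = ""
        self.peer_asn: str = ""
        self.prefix: str = ""
        self.timestamp: str = ""

    def to_json(self) -> str:
        # Serialise all instance attributes to a JSON string.
        return json.dumps(vars(self))

    def from_json(self, data: str) -> None:
        # Reject an empty string, mirroring the ValueError the test expects.
        if not data:
            raise ValueError("data is required")
        vars(self).update(json.loads(data))

    def equal_to(self, other: "entry_sketch") -> bool:
        # Field-by-field comparison of the two entries.
        return vars(self) == vars(other)


e1 = entry_sketch()
e1.prefix = "89.30.150.0/23"  # example values taken from the hunks above
e1.peer_asn = "18106"
e2 = entry_sketch()
e2.from_json(e1.to_json())
assert e1.equal_to(e2)  # a JSON round-trip preserves equality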
diff --git a/dnas/tests/test_mrt_getter.py b/dnas/tests/test_mrt_getter.py
index 383fa5f..044f2a4 100644
--- a/dnas/tests/test_mrt_getter.py
+++ b/dnas/tests/test_mrt_getter.py
@@ -1,13 +1,12 @@
 import os
-import requests
 import sys
+import typing
 import unittest
 
+import requests
+
 sys.path.append(
-    os.path.join(
-        os.path.dirname(os.path.realpath(__file__))
-        , "../"
-    )
+    os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")
 )
 from dnas.mrt_getter import mrt_getter
 from dnas.mrt_parser import mrt_parser
@@ -23,7 +22,7 @@ class test_mrt_getter(unittest.TestCase):
     invalid_path = "http://archive.routeviews.org/mQFk79SrBI29HPUg0EgxFC17nkyZP4"
     invalid_domain = "https://mQFk79SrBI29HPUg0EgxFC17nkyZP4.com"
 
-    def test_download_mrt(self):
+    def test_download_mrt(self: "test_mrt_getter") -> None:
         with self.assertRaises(ValueError):
             mrt_getter.download_mrt()
 
@@ -45,7 +44,7 @@ def test_download_mrt(self):
             replace = False,
             url = self.invalid_domain
         )
-
+
         mrt_getter.download_mrt(
             filename = self.output_file,
             replace = False,
@@ -55,5 +54,6 @@ def test_download_mrt(self):
         self.assertEqual(self.no_records, mrt_parser.mrt_count(self.output_file))
     """
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/dnas/tests/test_mrt_parser.py b/dnas/tests/test_mrt_parser.py
index f41ee3e..cb16315 100644
--- a/dnas/tests/test_mrt_parser.py
+++ b/dnas/tests/test_mrt_parser.py
@@ -1,6 +1,7 @@
 import os
 import shutil
 import sys
+import typing
 import unittest
 
 sys.path.append(
@@ -8,16 +9,15 @@
 )
 from dnas.config import config
 from dnas.mrt_archives import mrt_archives
+from dnas.mrt_entry import mrt_entry
 from dnas.mrt_parser import mrt_parser
 from dnas.mrt_stats import mrt_stats
-from dnas.mrt_entry import mrt_entry
 
 
 class test_mrt_parser(unittest.TestCase):
-
     cfg = config()
 
-    def setUp(self):
+    def setUp(self: "test_mrt_parser") -> None:
         """
         Copy the test files to the location they would be in, if we had
         downloaded them from the public archives:
@@ -83,7 +83,7 @@ def setUp(self):
         shutil.copy2(self.upd_3_path, self.upd_3_mrt)
         shutil.copy2(self.upd_4_path, self.upd_4_mrt)
 
-    def test_init(self):
+    def test_init(self: "test_mrt_parser") -> None:
         """
         The mrt_parser class contains only static methods for now, so check
         that nothing is returned and nothing is raised.
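The test_init docstring above describes the idiom these test modules use for a class that only exposes static methods: construct it inside try/except, set a flag if anything raises, and assert the flag stayed False. A minimal runnable sketch of that flag pattern follows; parser_sketch is a hypothetical stand-in for mrt_parser, and only the "asserted" flag idiom and the self-annotation style are taken from the surrounding hunks.

import unittest


class parser_sketch:
    """Stand-in for a class that only exposes static methods."""

    @staticmethod
    def mrt_count(filename: str) -> int:
        # Trivial body; the real method would count entries in an MRT file.
        return 0


class test_parser_sketch(unittest.TestCase):
    def test_init(self: "test_parser_sketch") -> None:
        # Construction must succeed and raise nothing.
        asserted = False
        try:
            mrt_p = parser_sketch()
        except Exception:
            asserted = True
        self.assertEqual(type(mrt_p), parser_sketch)
        self.assertEqual(asserted, False)


if __name__ == "__main__":
    unittest.main()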
@@ -96,7 +96,7 @@ def test_init(self): self.assertEqual(type(mrt_p), mrt_parser) self.assertEqual(asserted, False) - def test_parse_upd_dump(self): + def test_parse_upd_dump(self: "test_mrt_parser") -> None: """ Throughout this function the MRT file being parsed is alternating, because we want to hit all code paths, no one MRT file contains the @@ -776,9 +776,9 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_advt_prefixes[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_advt_prefixes[0].next_hop, None) + self.assertEqual(upd_2_stats.most_advt_prefixes[0].next_hop, "") self.assertEqual(upd_2_stats.most_advt_prefixes[0].origin_asns, set()) - self.assertEqual(upd_2_stats.most_advt_prefixes[0].peer_asn, None) + self.assertEqual(upd_2_stats.most_advt_prefixes[0].peer_asn, "") self.assertEqual( upd_2_stats.most_advt_prefixes[0].prefix, "89.30.150.0/23" ) @@ -791,6 +791,9 @@ def test_parse_upd_dump(self): self.assertIsInstance(upd_3_stats.most_bogon_asns, list) self.assertEqual(len(upd_3_stats.most_bogon_asns), 2) for i in range(0, len(upd_3_stats.most_bogon_asns)): + print(f"comparing {i}") + upd_3_stats.most_bogon_asns[i].print() + test_3_stats.most_bogon_asns[i].print() self.assertTrue( upd_3_stats.most_bogon_asns[i].equal_to( test_3_stats.most_bogon_asns[i], meta=True @@ -806,9 +809,9 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_upd_prefixes[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_upd_prefixes[0].next_hop, None) + self.assertEqual(upd_2_stats.most_upd_prefixes[0].next_hop, "") self.assertEqual(upd_2_stats.most_upd_prefixes[0].origin_asns, set()) - self.assertEqual(upd_2_stats.most_upd_prefixes[0].peer_asn, None) + self.assertEqual(upd_2_stats.most_upd_prefixes[0].peer_asn, "") self.assertEqual( upd_2_stats.most_upd_prefixes[0].prefix, "89.30.150.0/23" ) @@ -827,9 +830,9 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_withd_prefixes[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_withd_prefixes[0].next_hop, None) + self.assertEqual(upd_2_stats.most_withd_prefixes[0].next_hop, "") self.assertEqual(upd_2_stats.most_withd_prefixes[0].origin_asns, set()) - self.assertEqual(upd_2_stats.most_withd_prefixes[0].peer_asn, None) + self.assertEqual(upd_2_stats.most_withd_prefixes[0].peer_asn, "") self.assertEqual( upd_2_stats.most_withd_prefixes[0].prefix, "2a01:9e00:4279::/48" ) @@ -848,12 +851,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_advt_origin_asn[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_advt_origin_asn[0].next_hop, None) + self.assertEqual(upd_2_stats.most_advt_origin_asn[0].next_hop, "") self.assertEqual( upd_2_stats.most_advt_origin_asn[0].origin_asns, set(["20473"]) ) - self.assertEqual(upd_2_stats.most_advt_origin_asn[0].peer_asn, None) - self.assertEqual(upd_2_stats.most_advt_origin_asn[0].prefix, None) + self.assertEqual(upd_2_stats.most_advt_origin_asn[0].peer_asn, "") + self.assertEqual(upd_2_stats.most_advt_origin_asn[0].prefix, "") self.assertEqual( upd_2_stats.most_advt_origin_asn[0].timestamp, "20220501.2305" ) @@ -869,10 +872,10 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_advt_peer_asn[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_advt_peer_asn[0].next_hop, None) + self.assertEqual(upd_2_stats.most_advt_peer_asn[0].next_hop, "") self.assertEqual(upd_2_stats.most_advt_peer_asn[0].origin_asns, set()) self.assertEqual(upd_2_stats.most_advt_peer_asn[0].peer_asn, 
"18106") - self.assertEqual(upd_2_stats.most_advt_peer_asn[0].prefix, None) + self.assertEqual(upd_2_stats.most_advt_peer_asn[0].prefix, "") self.assertEqual( upd_2_stats.most_advt_peer_asn[0].timestamp, "20220501.2305" ) @@ -888,10 +891,10 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_upd_peer_asn[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_upd_peer_asn[0].next_hop, None) + self.assertEqual(upd_2_stats.most_upd_peer_asn[0].next_hop, "") self.assertEqual(upd_2_stats.most_upd_peer_asn[0].origin_asns, set()) self.assertEqual(upd_2_stats.most_upd_peer_asn[0].peer_asn, "18106") - self.assertEqual(upd_2_stats.most_upd_peer_asn[0].prefix, None) + self.assertEqual(upd_2_stats.most_upd_peer_asn[0].prefix, "") self.assertEqual( upd_2_stats.most_upd_peer_asn[0].timestamp, "20220501.2305" ) @@ -907,10 +910,10 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_withd_peer_asn[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_withd_peer_asn[0].next_hop, None) + self.assertEqual(upd_2_stats.most_withd_peer_asn[0].next_hop, "") self.assertEqual(upd_2_stats.most_withd_peer_asn[0].origin_asns, set()) self.assertEqual(upd_2_stats.most_withd_peer_asn[0].peer_asn, "18106") - self.assertEqual(upd_2_stats.most_withd_peer_asn[0].prefix, None) + self.assertEqual(upd_2_stats.most_withd_peer_asn[0].prefix, "") self.assertEqual( upd_2_stats.most_withd_peer_asn[0].timestamp, "20220501.2305" ) @@ -926,12 +929,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[0].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[0].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[0].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[0].origin_asns, set(["28198", "262375"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[0].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[0].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[0].prefix, "177.131.0.0/21" ) @@ -947,12 +950,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[1].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[1].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[1].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[1].origin_asns, set(["271204", "266181"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[1].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[1].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[1].prefix, "179.49.190.0/23" ) @@ -968,12 +971,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[2].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[2].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[2].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[2].origin_asns, set(["396559", "396542"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[2].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[2].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[2].prefix, "2620:74:2a::/48" ) @@ -989,11 +992,11 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[3].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[3].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[3].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[3].origin_asns, set(["7420", "37154"]) ) - 
self.assertEqual(upd_2_stats.most_origin_asns[3].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[3].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[3].prefix, "196.46.192.0/19" ) @@ -1009,11 +1012,11 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[4].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[4].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[4].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[4].origin_asns, set(["7487", "54396"]) ) - self.assertEqual(upd_2_stats.most_origin_asns[4].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[4].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[4].prefix, "205.197.192.0/21" ) @@ -1029,12 +1032,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[5].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[5].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[5].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[5].origin_asns, set(["203020", "29802"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[5].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[5].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[5].prefix, "206.123.159.0/24" ) @@ -1050,12 +1053,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[6].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[6].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[6].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[6].origin_asns, set(["52000", "19318"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[6].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[6].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[6].prefix, "68.168.210.0/24" ) @@ -1071,12 +1074,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[7].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[7].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[7].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[7].origin_asns, set(["55020", "137951"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[7].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[7].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[7].prefix, "156.241.128.0/22" ) @@ -1092,12 +1095,12 @@ def test_parse_upd_dump(self): self.assertEqual( upd_2_stats.most_origin_asns[8].filename, self.upd_2_mrt ) - self.assertEqual(upd_2_stats.most_origin_asns[8].next_hop, None) + self.assertEqual(upd_2_stats.most_origin_asns[8].next_hop, "") self.assertEqual( upd_2_stats.most_origin_asns[8].origin_asns, set(["269208", "268347"]), ) - self.assertEqual(upd_2_stats.most_origin_asns[8].peer_asn, None) + self.assertEqual(upd_2_stats.most_origin_asns[8].peer_asn, "") self.assertEqual( upd_2_stats.most_origin_asns[8].prefix, "2804:610c::/32" ) @@ -1152,5 +1155,5 @@ def test_parse_upd_dump(self): self.assertEqual(upd_2_stats.timestamp, "20220501.2305") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_mrt_splitter.py b/dnas/tests/test_mrt_splitter.py index a4527eb..6d11b69 100644 --- a/dnas/tests/test_mrt_splitter.py +++ b/dnas/tests/test_mrt_splitter.py @@ -1,20 +1,19 @@ -import bz2 import gzip -import io import os import sys +import typing import unittest sys.path.append( 
os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) -from dnas.mrt_splitter import mrt_splitter from dnas.mrt_parser import mrt_parser +from dnas.mrt_splitter import mrt_splitter class test_mrt_splitter(unittest.TestCase): - def setUp(self): + def setUp(self: "test_mrt_splitter") -> None: self.no_of_chunks = 8 self.gz_filename = os.path.join( os.path.dirname(os.path.realpath(__file__)), @@ -23,7 +22,7 @@ def setUp(self): ) self.file_size = 30285 - def test_init(self): + def test_init(self: "test_mrt_splitter") -> None: self.assertRaises(ValueError, mrt_splitter, "") self.assertRaises(TypeError, mrt_splitter, 1.23) self.assertRaises( @@ -41,12 +40,15 @@ def test_init(self): except StopIteration: pass - def test_split(self): - + def test_split(self: "test_mrt_splitter") -> None: splitter = mrt_splitter(self.gz_filename) - self.assertRaises(ValueError, splitter.split, -1) - total, chunk_names = splitter.split(self.no_of_chunks) + self.assertRaises(ValueError, splitter.split, -1, -1) + self.assertRaises(TypeError, splitter.split, -1) + total, chunk_names = splitter.split( + no_chunks=self.no_of_chunks, + outdir=os.path.dirname(splitter.filename), + ) self.assertTrue(isinstance(total, int)) self.assertEqual(total, self.file_size) @@ -67,5 +69,5 @@ def test_split(self): os.unlink(filename) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/dnas/tests/test_mrt_stats.py b/dnas/tests/test_mrt_stats.py index c7d5ccb..5d1c540 100644 --- a/dnas/tests/test_mrt_stats.py +++ b/dnas/tests/test_mrt_stats.py @@ -3,19 +3,20 @@ import re import shutil import sys +import typing import unittest sys.path.append( os.path.join(os.path.dirname(os.path.realpath(__file__)), "../") ) from dnas.mrt_archives import mrt_archives -from dnas.mrt_stats import mrt_stats from dnas.mrt_entry import mrt_entry from dnas.mrt_parser import mrt_parser +from dnas.mrt_stats import mrt_stats class test_mrt_stats(unittest.TestCase): - def setUp(self): + def setUp(self: "test_mrt_stats") -> None: """ Copy the test files to the location they would be in, if we had downloaded them from the public archives: @@ -133,7 +134,7 @@ def setUp(self): "rcc23.updates.20220421.0200.gz.test", ) - def test_init(self): + def test_init(self: "test_mrt_stats") -> None: mrt_s = mrt_stats() self.assertIsInstance(mrt_s, mrt_stats) self.assertIsInstance(mrt_s.archive_list, set) @@ -181,7 +182,7 @@ def test_init(self): self.assertIsInstance(mrt_s.total_withd, int) self.assertEqual(mrt_s.total_withd, 0) - def test_add(self): + def test_add(self: "test_mrt_stats") -> None: add_stats_1 = mrt_parser.parse_upd_dump(self.upd_1_mrt) self.assertIsInstance(add_stats_1, mrt_stats) @@ -1203,9 +1204,9 @@ def test_add(self): os.path.basename(add_stats_1.most_advt_prefixes[0].filename), os.path.basename(self.upd_2_mrt), ) - self.assertEqual(add_stats_1.most_advt_prefixes[0].next_hop, None) + self.assertEqual(add_stats_1.most_advt_prefixes[0].next_hop, "") self.assertEqual(add_stats_1.most_advt_prefixes[0].origin_asns, set()) - self.assertEqual(add_stats_1.most_advt_prefixes[0].peer_asn, None) + self.assertEqual(add_stats_1.most_advt_prefixes[0].peer_asn, "") self.assertEqual( add_stats_1.most_advt_prefixes[0].prefix, "89.30.150.0/23" ) @@ -1223,12 +1224,12 @@ def test_add(self): os.path.basename(add_stats_3.most_bogon_asns[0].filename), os.path.basename(self.upd_3_mrt), ) - self.assertEqual(add_stats_3.most_bogon_asns[0].next_hop, None) + self.assertEqual(add_stats_3.most_bogon_asns[0].next_hop, "") self.assertEqual( 
             add_stats_3.most_bogon_asns[0].origin_asns, set(["23456"])
         )
-        self.assertEqual(add_stats_3.most_bogon_asns[0].peer_asn, None)
-        self.assertEqual(add_stats_3.most_bogon_asns[0].prefix, None)
+        self.assertEqual(add_stats_3.most_bogon_asns[0].peer_asn, "")
+        self.assertEqual(add_stats_3.most_bogon_asns[0].prefix, "")
         self.assertEqual(
             add_stats_3.most_bogon_asns[0].timestamp, "20220601.0230"
         )
@@ -1242,12 +1243,12 @@ def test_add(self):
             os.path.basename(add_stats_3.most_bogon_asns[1].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(add_stats_3.most_bogon_asns[1].next_hop, None)
+        self.assertEqual(add_stats_3.most_bogon_asns[1].next_hop, "")
         self.assertEqual(
             add_stats_3.most_bogon_asns[1].origin_asns, set(["65005"])
         )
-        self.assertEqual(add_stats_3.most_bogon_asns[1].peer_asn, None)
-        self.assertEqual(add_stats_3.most_bogon_asns[1].prefix, None)
+        self.assertEqual(add_stats_3.most_bogon_asns[1].peer_asn, "")
+        self.assertEqual(add_stats_3.most_bogon_asns[1].prefix, "")
         self.assertEqual(
             add_stats_3.most_bogon_asns[1].timestamp, "20220601.0415"
         )
@@ -1261,12 +1262,12 @@ def test_add(self):
             os.path.basename(add_stats_3.most_bogon_asns[2].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(add_stats_3.most_bogon_asns[2].next_hop, None)
+        self.assertEqual(add_stats_3.most_bogon_asns[2].next_hop, "")
         self.assertEqual(
             add_stats_3.most_bogon_asns[2].origin_asns, set(["65530"])
         )
-        self.assertEqual(add_stats_3.most_bogon_asns[2].peer_asn, None)
-        self.assertEqual(add_stats_3.most_bogon_asns[2].prefix, None)
+        self.assertEqual(add_stats_3.most_bogon_asns[2].peer_asn, "")
+        self.assertEqual(add_stats_3.most_bogon_asns[2].prefix, "")
         self.assertEqual(
             add_stats_3.most_bogon_asns[2].timestamp, "20220601.0415"
         )
@@ -1281,9 +1282,9 @@ def test_add(self):
             os.path.basename(add_stats_1.most_upd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_upd_prefixes[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_upd_prefixes[0].next_hop, "")
         self.assertEqual(add_stats_1.most_upd_prefixes[0].origin_asns, set())
-        self.assertEqual(add_stats_1.most_upd_prefixes[0].peer_asn, None)
+        self.assertEqual(add_stats_1.most_upd_prefixes[0].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_upd_prefixes[0].prefix, "89.30.150.0/23"
         )
@@ -1301,9 +1302,9 @@ def test_add(self):
             os.path.basename(add_stats_1.most_withd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_withd_prefixes[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_withd_prefixes[0].next_hop, "")
         self.assertEqual(add_stats_1.most_withd_prefixes[0].origin_asns, set())
-        self.assertEqual(add_stats_1.most_withd_prefixes[0].peer_asn, None)
+        self.assertEqual(add_stats_1.most_withd_prefixes[0].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_withd_prefixes[0].prefix, "2a01:9e00:4279::/48"
         )
@@ -1321,12 +1322,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_advt_origin_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_advt_origin_asn[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_advt_origin_asn[0].next_hop, "")
         self.assertEqual(
             add_stats_1.most_advt_origin_asn[0].origin_asns, set(["20473"])
         )
-        self.assertEqual(add_stats_1.most_advt_origin_asn[0].peer_asn, None)
-        self.assertEqual(add_stats_1.most_advt_origin_asn[0].prefix, None)
+        self.assertEqual(add_stats_1.most_advt_origin_asn[0].peer_asn, "")
+        self.assertEqual(add_stats_1.most_advt_origin_asn[0].prefix, "")
         self.assertEqual(
             add_stats_1.most_advt_origin_asn[0].timestamp, "20220501.2305"
         )
@@ -1341,10 +1342,10 @@ def test_add(self):
             os.path.basename(add_stats_1.most_advt_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_advt_peer_asn[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_advt_peer_asn[0].next_hop, "")
         self.assertEqual(add_stats_1.most_advt_peer_asn[0].origin_asns, set())
         self.assertEqual(add_stats_1.most_advt_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(add_stats_1.most_advt_peer_asn[0].prefix, None)
+        self.assertEqual(add_stats_1.most_advt_peer_asn[0].prefix, "")
         self.assertEqual(
             add_stats_1.most_advt_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -1359,10 +1360,10 @@ def test_add(self):
             os.path.basename(add_stats_1.most_upd_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_upd_peer_asn[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_upd_peer_asn[0].next_hop, "")
         self.assertEqual(add_stats_1.most_upd_peer_asn[0].origin_asns, set())
         self.assertEqual(add_stats_1.most_upd_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(add_stats_1.most_upd_peer_asn[0].prefix, None)
+        self.assertEqual(add_stats_1.most_upd_peer_asn[0].prefix, "")
         self.assertEqual(
             add_stats_1.most_upd_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -1377,10 +1378,10 @@ def test_add(self):
             os.path.basename(add_stats_1.most_withd_peer_asn[0].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_withd_peer_asn[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_withd_peer_asn[0].next_hop, "")
         self.assertEqual(add_stats_1.most_withd_peer_asn[0].origin_asns, set())
         self.assertEqual(add_stats_1.most_withd_peer_asn[0].peer_asn, "133210")
-        self.assertEqual(add_stats_1.most_withd_peer_asn[0].prefix, None)
+        self.assertEqual(add_stats_1.most_withd_peer_asn[0].prefix, "")
         self.assertEqual(
             add_stats_1.most_withd_peer_asn[0].timestamp, "20220421.0200"
         )
@@ -1395,12 +1396,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[0].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[0].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[0].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[0].origin_asns,
             set(["61424", "58143"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[0].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[0].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[0].prefix, "5.35.174.0/24"
         )
@@ -1417,12 +1418,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[1].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[1].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[1].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[1].origin_asns,
             set(["28198", "262375"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[1].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[1].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[1].prefix, "177.131.0.0/21"
         )
@@ -1439,12 +1440,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[2].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[2].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[2].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[2].origin_asns,
             set(["396559", "396542"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[2].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[2].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[2].prefix, "2620:74:2a::/48"
         )
@@ -1461,12 +1462,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[3].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[3].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[3].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[3].origin_asns,
             set(["138346", "134382"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[3].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[3].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[3].prefix, "103.88.233.0/24"
         )
@@ -1483,11 +1484,11 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[4].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[4].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[4].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[4].origin_asns, set(["37154", "7420"])
         )
-        self.assertEqual(add_stats_1.most_origin_asns[4].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[4].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[4].prefix, "196.46.192.0/19"
         )
@@ -1504,12 +1505,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[5].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[5].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[5].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[5].origin_asns,
             set(["136561", "59362"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[5].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[5].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[5].prefix, "123.253.98.0/23"
         )
@@ -1526,12 +1527,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[6].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[6].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[6].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[6].origin_asns,
             set(["132608", "17806"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[6].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[6].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[6].prefix, "114.130.38.0/24"
         )
@@ -1548,12 +1549,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[7].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[7].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[7].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[7].origin_asns,
             set(["136907", "55990"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[7].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[7].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[7].prefix, "124.71.250.0/24"
         )
@@ -1570,12 +1571,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[8].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[8].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[8].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[8].origin_asns,
             set(["136907", "55990"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[8].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[8].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[8].prefix, "139.9.98.0/24"
         )
@@ -1592,11 +1593,11 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[9].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[9].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[9].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[9].origin_asns, set(["7545", "4739"])
         )
-        self.assertEqual(add_stats_1.most_origin_asns[9].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[9].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[9].prefix, "203.19.254.0/24"
         )
@@ -1613,12 +1614,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[10].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[10].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[10].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[10].origin_asns,
             set(["271204", "266181"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[10].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[10].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[10].prefix, "179.49.190.0/23"
         )
@@ -1635,12 +1636,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[11].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[11].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[11].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[11].origin_asns,
             set(["7487", "54396"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[11].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[11].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[11].prefix, "205.197.192.0/21"
         )
@@ -1657,12 +1658,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[12].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[12].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[12].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[12].origin_asns,
             set(["203020", "29802"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[12].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[12].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[12].prefix, "206.123.159.0/24"
         )
@@ -1679,12 +1680,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[13].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[13].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[13].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[13].origin_asns,
             set(["52000", "19318"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[13].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[13].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[13].prefix, "68.168.210.0/24"
         )
@@ -1701,12 +1702,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[14].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[14].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[14].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[14].origin_asns,
             set(["55020", "137951"]),
        )
-        self.assertEqual(add_stats_1.most_origin_asns[14].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[14].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[14].prefix, "156.241.128.0/22"
         )
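The hunks above and below all make the same mechanical change: mrt_entry
attributes that the tests previously expected to be None are now expected to
be the empty string. A minimal sketch of the presumed motivation (this is an
illustration, not the real dnas.mrt_entry class): under the strict mypy
settings added in mypy.ini at the end of this patch, string fields that
default to None would have to be typed Optional[str] and None-checked at
every use, whereas a plain str field with "" as the "unset" value stays a
single, simple type:

    from dataclasses import dataclass, field

    # Illustrative only: every string field defaults to "" rather than None,
    # so the attribute is always a str and never Optional[str].
    @dataclass
    class entry_sketch:
        filename: str = ""
        next_hop: str = ""
        peer_asn: str = ""
        prefix: str = ""
        origin_asns: set[str] = field(default_factory=set)
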
@@ -1723,12 +1724,12 @@ def test_add(self):
             os.path.basename(add_stats_1.most_origin_asns[15].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[15].next_hop, None)
+        self.assertEqual(add_stats_1.most_origin_asns[15].next_hop, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[15].origin_asns,
             set(["269208", "268347"]),
         )
-        self.assertEqual(add_stats_1.most_origin_asns[15].peer_asn, None)
+        self.assertEqual(add_stats_1.most_origin_asns[15].peer_asn, "")
         self.assertEqual(
             add_stats_1.most_origin_asns[15].prefix, "2804:610c::/32"
         )
@@ -1783,7 +1784,7 @@ def test_add(self):
         )
         self.assertEqual(add_stats_1.timestamp, "20220501.2305")
 
-    def test_equal_to(self):
+    def test_equal_to(self: "test_mrt_stats") -> None:
         stats = mrt_parser.parse_upd_dump(self.upd_1_mrt)
         self.assertRaises(ValueError, stats.equal_to, None)
@@ -1795,7 +1796,7 @@ def test_equal_to(self):
         stats = mrt_parser.parse_upd_dump(self.upd_5_mrt)
         self.assertTrue(stats.equal_to(self.upd_5_stats))
 
-    def test_from_file(self):
+    def test_from_file(self: "test_mrt_stats") -> None:
         stats = mrt_stats()
         self.assertRaises(ValueError, stats.from_file, None)
         self.assertRaises(TypeError, stats.from_file, 123)
@@ -1813,7 +1814,7 @@ def test_from_file(self):
         self.assertIsInstance(stats, mrt_stats)
         self.assertTrue(self.upd_5_stats.equal_to(stats))
 
-    def test_from_json(self):
+    def test_from_json(self: "test_mrt_stats") -> None:
         stats = mrt_stats()
         self.assertRaises(ValueError, stats.from_json, None)
         self.assertRaises(TypeError, stats.from_json, 123)
@@ -1842,7 +1843,7 @@ def test_from_json(self):
         f.close()
         self.assertTrue(stats.equal_to(self.upd_5_stats))
 
-    def test_gen_ts_from_ymd(self):
+    def test_gen_ts_from_ymd(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.gen_ts_from_ymd, None)
         self.assertRaises(TypeError, self.upd_1_stats.gen_ts_from_ymd, 123)
         ret = self.upd_1_stats.gen_ts_from_ymd("20220228")
@@ -1858,26 +1859,26 @@ def test_gen_ts_from_ymd(self):
             )
         )
 
-    def test_gen_daily_key(self):
+    def test_gen_daily_key(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.gen_daily_key, None)
         self.assertRaises(TypeError, self.upd_1_stats.gen_daily_key, 123)
         ret = self.upd_1_stats.gen_daily_key("20220228")
         self.assertIsInstance(ret, str)
         self.assertEqual(ret, "DAILY:20220228")
 
-    def test_gen_diff_key(self):
+    def test_gen_diff_key(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.gen_diff_key, None)
         self.assertRaises(TypeError, self.upd_1_stats.gen_diff_key, 123)
         ret = self.upd_1_stats.gen_diff_key("20220228")
         self.assertIsInstance(ret, str)
         self.assertEqual(ret, "DAILY_DIFF:20220228")
 
-    def test_gen_global_key(self):
+    def test_gen_global_key(self: "test_mrt_stats") -> None:
         ret = self.upd_1_stats.gen_global_key()
         self.assertIsInstance(ret, str)
         self.assertEqual(ret, "GLOBAL")
 
-    def test_get_diff(self):
+    def test_get_diff(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.get_diff, None)
         self.assertRaises(TypeError, self.upd_1_stats.get_diff, 123)
@@ -2480,9 +2481,9 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_advt_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_advt_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_advt_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_advt_prefixes[0].peer_asn, "")
         self.assertEqual(diff_1.most_advt_prefixes[0].prefix, "89.30.150.0/23")
         self.assertEqual(
             diff_1.most_advt_prefixes[0].timestamp, "20220501.2305"
@@ -2499,10 +2500,10 @@ def test_get_diff(self):
             os.path.basename(diff_3.most_bogon_asns[0].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(diff_3.most_bogon_asns[0].next_hop, None)
+        self.assertEqual(diff_3.most_bogon_asns[0].next_hop, "")
         self.assertEqual(diff_3.most_bogon_asns[0].origin_asns, set(["65005"]))
-        self.assertEqual(diff_3.most_bogon_asns[0].peer_asn, None)
-        self.assertEqual(diff_3.most_bogon_asns[0].prefix, None)
+        self.assertEqual(diff_3.most_bogon_asns[0].peer_asn, "")
+        self.assertEqual(diff_3.most_bogon_asns[0].prefix, "")
         self.assertEqual(diff_3.most_bogon_asns[0].timestamp, "20220601.0415")
         self.assertEqual(diff_3.most_bogon_asns[0].updates, 0)
         self.assertEqual(diff_3.most_bogon_asns[0].withdraws, 0)
@@ -2514,10 +2515,10 @@ def test_get_diff(self):
             os.path.basename(diff_3.most_bogon_asns[1].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(diff_3.most_bogon_asns[1].next_hop, None)
+        self.assertEqual(diff_3.most_bogon_asns[1].next_hop, "")
         self.assertEqual(diff_3.most_bogon_asns[1].origin_asns, set(["65530"]))
-        self.assertEqual(diff_3.most_bogon_asns[1].peer_asn, None)
-        self.assertEqual(diff_3.most_bogon_asns[1].prefix, None)
+        self.assertEqual(diff_3.most_bogon_asns[1].peer_asn, "")
+        self.assertEqual(diff_3.most_bogon_asns[1].prefix, "")
         self.assertEqual(diff_3.most_bogon_asns[1].timestamp, "20220601.0415")
         self.assertEqual(diff_3.most_bogon_asns[1].updates, 0)
         self.assertEqual(diff_3.most_bogon_asns[1].withdraws, 0)
@@ -2530,9 +2531,9 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_upd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_upd_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_upd_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_upd_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_upd_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_upd_prefixes[0].peer_asn, "")
         self.assertEqual(diff_1.most_upd_prefixes[0].prefix, "89.30.150.0/23")
         self.assertEqual(
             diff_1.most_upd_prefixes[0].timestamp, "20220501.2305"
@@ -2548,9 +2549,9 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_withd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_withd_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_withd_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_withd_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_withd_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_withd_prefixes[0].peer_asn, "")
         self.assertEqual(
             diff_1.most_withd_prefixes[0].prefix, "2a01:9e00:4279::/48"
         )
@@ -2568,12 +2569,12 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_advt_origin_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_origin_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_origin_asn[0].next_hop, "")
         self.assertEqual(
             diff_1.most_advt_origin_asn[0].origin_asns, set(["20473"])
         )
-        self.assertEqual(diff_1.most_advt_origin_asn[0].peer_asn, None)
-        self.assertEqual(diff_1.most_advt_origin_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_advt_origin_asn[0].peer_asn, "")
+        self.assertEqual(diff_1.most_advt_origin_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_advt_origin_asn[0].timestamp, "20220501.2305"
         )
@@ -2588,10 +2589,10 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_advt_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_peer_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_peer_asn[0].next_hop, "")
         self.assertEqual(diff_1.most_advt_peer_asn[0].origin_asns, set())
         self.assertEqual(diff_1.most_advt_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(diff_1.most_advt_peer_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_advt_peer_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_advt_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -2606,10 +2607,10 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_upd_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_upd_peer_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_upd_peer_asn[0].next_hop, "")
         self.assertEqual(diff_1.most_upd_peer_asn[0].origin_asns, set())
         self.assertEqual(diff_1.most_upd_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(diff_1.most_upd_peer_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_upd_peer_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_upd_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -2624,10 +2625,10 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_withd_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_withd_peer_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_withd_peer_asn[0].next_hop, "")
         self.assertEqual(diff_1.most_withd_peer_asn[0].origin_asns, set())
         self.assertEqual(diff_1.most_withd_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(diff_1.most_withd_peer_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_withd_peer_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_withd_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -2642,11 +2643,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[0].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[0].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[0].origin_asns, set(["28198", "262375"])
         )
-        self.assertEqual(diff_1.most_origin_asns[0].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[0].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[0].prefix, "177.131.0.0/21")
         self.assertEqual(diff_1.most_origin_asns[0].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[0].updates, 0)
@@ -2659,11 +2660,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[1].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[1].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[1].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[1].origin_asns, set(["271204", "266181"])
         )
-        self.assertEqual(diff_1.most_origin_asns[1].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[1].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[1].prefix, "179.49.190.0/23")
         self.assertEqual(diff_1.most_origin_asns[1].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[1].updates, 0)
@@ -2676,11 +2677,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[2].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[2].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[2].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[2].origin_asns, set(["396559", "396542"])
         )
-        self.assertEqual(diff_1.most_origin_asns[2].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[2].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[2].prefix, "2620:74:2a::/48")
         self.assertEqual(diff_1.most_origin_asns[2].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[2].updates, 0)
@@ -2693,11 +2694,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[3].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[3].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[3].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[3].origin_asns, set(["37154", "7420"])
         )
-        self.assertEqual(diff_1.most_origin_asns[3].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[3].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[3].prefix, "196.46.192.0/19")
         self.assertEqual(diff_1.most_origin_asns[3].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[3].updates, 0)
@@ -2710,11 +2711,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[4].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[4].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[4].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[4].origin_asns, set(["7487", "54396"])
         )
-        self.assertEqual(diff_1.most_origin_asns[4].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[4].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[4].prefix, "205.197.192.0/21")
         self.assertEqual(diff_1.most_origin_asns[4].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[4].updates, 0)
@@ -2728,11 +2729,11 @@ def test_get_diff(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[5].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[5].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[5].origin_asns, set(["203020", "29802"])
         )
-        self.assertEqual(diff_1.most_origin_asns[5].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[5].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[5].prefix, "206.123.159.0/24")
         self.assertEqual(diff_1.most_origin_asns[5].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[5].updates, 0)
@@ -2745,11 +2746,11 @@ def test_get_diff(self):
             os.path.basename(diff_1.most_origin_asns[6].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[6].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[6].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[6].origin_asns, set(["52000", "19318"])
         )
-        self.assertEqual(diff_1.most_origin_asns[6].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[6].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[6].prefix, "68.168.210.0/24")
         self.assertEqual(diff_1.most_origin_asns[6].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[6].updates, 0)
@@ -2763,11 +2764,11 @@ def test_get_diff(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[7].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[7].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[7].origin_asns, set(["55020", "137951"])
         )
-        self.assertEqual(diff_1.most_origin_asns[7].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[7].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[7].prefix, "156.241.128.0/22")
         self.assertEqual(diff_1.most_origin_asns[7].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[7].updates, 0)
@@ -2781,11 +2782,11 @@ def test_get_diff(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_origin_asns[8].next_hop, None)
+        self.assertEqual(diff_1.most_origin_asns[8].next_hop, "")
         self.assertEqual(
             diff_1.most_origin_asns[8].origin_asns, set(["269208", "268347"])
         )
-        self.assertEqual(diff_1.most_origin_asns[8].peer_asn, None)
+        self.assertEqual(diff_1.most_origin_asns[8].peer_asn, "")
         self.assertEqual(diff_1.most_origin_asns[8].prefix, "2804:610c::/32")
         self.assertEqual(diff_1.most_origin_asns[8].timestamp, "20220501.2305")
         self.assertEqual(diff_1.most_origin_asns[8].updates, 0)
@@ -2831,7 +2832,7 @@ def test_get_diff(self):
         self.assertEqual(diff_1.file_list, [])
         self.assertEqual(diff_1.timestamp, "")
 
-    def test_get_diff_larger(self):
+    def test_get_diff_larger(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.get_diff_larger, None)
         self.assertRaises(TypeError, self.upd_1_stats.get_diff_larger, 123)
@@ -2868,9 +2869,9 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_advt_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_advt_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_advt_prefixes[0].peer_asn, "")
         self.assertEqual(diff_1.most_advt_prefixes[0].prefix, "89.30.150.0/23")
         self.assertEqual(
             diff_1.most_advt_prefixes[0].timestamp, "20220501.2305"
@@ -2896,9 +2897,9 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_upd_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_upd_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_upd_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_upd_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_upd_prefixes[0].peer_asn, "")
         self.assertEqual(diff_1.most_upd_prefixes[0].prefix, "89.30.150.0/23")
         self.assertEqual(
             diff_1.most_upd_prefixes[0].timestamp, "20220501.2305"
@@ -2918,9 +2919,9 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_withd_prefixes[0].next_hop, None)
+        self.assertEqual(diff_1.most_withd_prefixes[0].next_hop, "")
         self.assertEqual(diff_1.most_withd_prefixes[0].origin_asns, set())
-        self.assertEqual(diff_1.most_withd_prefixes[0].peer_asn, None)
+        self.assertEqual(diff_1.most_withd_prefixes[0].peer_asn, "")
         self.assertEqual(
             diff_1.most_withd_prefixes[0].prefix, "2a01:9e00:4279::/48"
         )
@@ -2939,12 +2940,12 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_origin_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_origin_asn[0].next_hop, "")
         self.assertEqual(
             diff_1.most_advt_origin_asn[0].origin_asns, set(["20473"])
         )
-        self.assertEqual(diff_1.most_advt_origin_asn[0].peer_asn, None)
-        self.assertEqual(diff_1.most_advt_origin_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_advt_origin_asn[0].peer_asn, "")
+        self.assertEqual(diff_1.most_advt_origin_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_advt_origin_asn[0].timestamp, "20220501.2305"
         )
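Alongside the None-to-"" assertions, every test method in this patch gains an
explicit annotation on self plus a -> None return type. A short standalone
example of the pattern (a hypothetical test class, not from dnas): the class
name is written as a string forward reference because the class object does
not exist yet while its own body is being evaluated, and the annotations make
each method a fully typed function that mypy checks in full:

    import unittest

    class test_example(unittest.TestCase):
        # "test_example" is a string forward reference to the enclosing
        # class; with it and "-> None" the method is fully annotated.
        def test_something(self: "test_example") -> None:
            self.assertTrue(True)

    if __name__ == "__main__":
        unittest.main()
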
@@ -2960,10 +2961,10 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_advt_peer_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_advt_peer_asn[0].next_hop, "")
         self.assertEqual(diff_1.most_advt_peer_asn[0].origin_asns, set())
         self.assertEqual(diff_1.most_advt_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(diff_1.most_advt_peer_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_advt_peer_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_advt_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -2982,10 +2983,10 @@ def test_get_diff_larger(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(diff_1.most_upd_peer_asn[0].next_hop, None)
+        self.assertEqual(diff_1.most_upd_peer_asn[0].next_hop, "")
         self.assertEqual(diff_1.most_upd_peer_asn[0].origin_asns, set())
         self.assertEqual(diff_1.most_upd_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(diff_1.most_upd_peer_asn[0].prefix, None)
+        self.assertEqual(diff_1.most_upd_peer_asn[0].prefix, "")
         self.assertEqual(
             diff_1.most_upd_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -3013,7 +3014,7 @@ def test_get_diff_larger(self):
         self.assertEqual(diff_1.file_list, [])
         self.assertEqual(diff_1.timestamp, "20220501.2305")
 
-    def test_gen_prev_daily_key(self):
+    def test_gen_prev_daily_key(self: "test_mrt_stats") -> None:
         self.assertRaises(
             ValueError, self.upd_1_stats.gen_prev_daily_key, None
         )
@@ -3025,13 +3026,13 @@ def test_gen_prev_daily_key(self):
         self.assertIsInstance(ret, str)
         self.assertEqual(ret, "DAILY:20220228")
 
-    def test_is_empty(self):
+    def test_is_empty(self: "test_mrt_stats") -> None:
         stats = mrt_stats()
         self.assertTrue(stats.is_empty())
-        stats.bogon_origin_asns.append(mrt_entry)
+        stats.bogon_origin_asns.append(mrt_entry())
         self.assertFalse(stats.is_empty())
 
-    def test_merge(self):
+    def test_merge(self: "test_mrt_stats") -> None:
         stats_1 = mrt_stats()
         stats_1.from_file(self.upd_1_json)
@@ -3860,9 +3861,9 @@ def test_merge(self):
             os.path.basename(stats_1.most_advt_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_advt_prefixes[0].next_hop, None)
+        self.assertEqual(stats_1.most_advt_prefixes[0].next_hop, "")
         self.assertEqual(stats_1.most_advt_prefixes[0].origin_asns, set())
-        self.assertEqual(stats_1.most_advt_prefixes[0].peer_asn, None)
+        self.assertEqual(stats_1.most_advt_prefixes[0].peer_asn, "")
         self.assertEqual(
             stats_1.most_advt_prefixes[0].prefix, "89.30.150.0/23"
         )
@@ -3880,12 +3881,12 @@ def test_merge(self):
             os.path.basename(stats_3.most_bogon_asns[0].filename),
             os.path.basename(self.upd_3_mrt),
         )
-        self.assertEqual(stats_3.most_bogon_asns[0].next_hop, None)
+        self.assertEqual(stats_3.most_bogon_asns[0].next_hop, "")
         self.assertEqual(
             stats_3.most_bogon_asns[0].origin_asns, set(["23456"])
         )
-        self.assertEqual(stats_3.most_bogon_asns[0].peer_asn, None)
-        self.assertEqual(stats_3.most_bogon_asns[0].prefix, None)
+        self.assertEqual(stats_3.most_bogon_asns[0].peer_asn, "")
+        self.assertEqual(stats_3.most_bogon_asns[0].prefix, "")
         self.assertEqual(stats_3.most_bogon_asns[0].timestamp, "20220601.0230")
         self.assertEqual(stats_3.most_bogon_asns[0].updates, 0)
         self.assertEqual(stats_3.most_bogon_asns[0].withdraws, 0)
@@ -3897,12 +3898,12 @@ def test_merge(self):
             os.path.basename(stats_3.most_bogon_asns[1].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(stats_3.most_bogon_asns[1].next_hop, None)
+        self.assertEqual(stats_3.most_bogon_asns[1].next_hop, "")
         self.assertEqual(
             stats_3.most_bogon_asns[1].origin_asns, set(["65005"])
         )
-        self.assertEqual(stats_3.most_bogon_asns[1].peer_asn, None)
-        self.assertEqual(stats_3.most_bogon_asns[1].prefix, None)
+        self.assertEqual(stats_3.most_bogon_asns[1].peer_asn, "")
+        self.assertEqual(stats_3.most_bogon_asns[1].prefix, "")
         self.assertEqual(stats_3.most_bogon_asns[1].timestamp, "20220601.0415")
         self.assertEqual(stats_3.most_bogon_asns[1].updates, 0)
         self.assertEqual(stats_3.most_bogon_asns[1].withdraws, 0)
@@ -3914,12 +3915,12 @@ def test_merge(self):
             os.path.basename(stats_3.most_bogon_asns[2].filename),
             os.path.basename(self.upd_4_mrt),
         )
-        self.assertEqual(stats_3.most_bogon_asns[2].next_hop, None)
+        self.assertEqual(stats_3.most_bogon_asns[2].next_hop, "")
         self.assertEqual(
             stats_3.most_bogon_asns[2].origin_asns, set(["65530"])
         )
-        self.assertEqual(stats_3.most_bogon_asns[2].peer_asn, None)
-        self.assertEqual(stats_3.most_bogon_asns[2].prefix, None)
+        self.assertEqual(stats_3.most_bogon_asns[2].peer_asn, "")
+        self.assertEqual(stats_3.most_bogon_asns[2].prefix, "")
         self.assertEqual(stats_3.most_bogon_asns[2].timestamp, "20220601.0415")
         self.assertEqual(stats_3.most_bogon_asns[2].updates, 0)
         self.assertEqual(stats_3.most_bogon_asns[2].withdraws, 0)
@@ -3932,9 +3933,9 @@ def test_merge(self):
             os.path.basename(stats_1.most_upd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_upd_prefixes[0].next_hop, None)
+        self.assertEqual(stats_1.most_upd_prefixes[0].next_hop, "")
         self.assertEqual(stats_1.most_upd_prefixes[0].origin_asns, set())
-        self.assertEqual(stats_1.most_upd_prefixes[0].peer_asn, None)
+        self.assertEqual(stats_1.most_upd_prefixes[0].peer_asn, "")
         self.assertEqual(stats_1.most_upd_prefixes[0].prefix, "89.30.150.0/23")
         self.assertEqual(
             stats_1.most_upd_prefixes[0].timestamp, "20220501.2305"
@@ -3950,9 +3951,9 @@ def test_merge(self):
             os.path.basename(stats_1.most_withd_prefixes[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_withd_prefixes[0].next_hop, None)
+        self.assertEqual(stats_1.most_withd_prefixes[0].next_hop, "")
         self.assertEqual(stats_1.most_withd_prefixes[0].origin_asns, set())
-        self.assertEqual(stats_1.most_withd_prefixes[0].peer_asn, None)
+        self.assertEqual(stats_1.most_withd_prefixes[0].peer_asn, "")
         self.assertEqual(
             stats_1.most_withd_prefixes[0].prefix, "2a01:9e00:4279::/48"
         )
@@ -3970,12 +3971,12 @@ def test_merge(self):
             os.path.basename(stats_1.most_advt_origin_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_advt_origin_asn[0].next_hop, None)
+        self.assertEqual(stats_1.most_advt_origin_asn[0].next_hop, "")
         self.assertEqual(
             stats_1.most_advt_origin_asn[0].origin_asns, set(["20473"])
         )
-        self.assertEqual(stats_1.most_advt_origin_asn[0].peer_asn, None)
-        self.assertEqual(stats_1.most_advt_origin_asn[0].prefix, None)
+        self.assertEqual(stats_1.most_advt_origin_asn[0].peer_asn, "")
+        self.assertEqual(stats_1.most_advt_origin_asn[0].prefix, "")
         self.assertEqual(
             stats_1.most_advt_origin_asn[0].timestamp, "20220501.2305"
         )
@@ -3990,10 +3991,10 @@ def test_merge(self):
             os.path.basename(stats_1.most_advt_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_advt_peer_asn[0].next_hop, None)
+        self.assertEqual(stats_1.most_advt_peer_asn[0].next_hop, "")
         self.assertEqual(stats_1.most_advt_peer_asn[0].origin_asns, set())
         self.assertEqual(stats_1.most_advt_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(stats_1.most_advt_peer_asn[0].prefix, None)
+        self.assertEqual(stats_1.most_advt_peer_asn[0].prefix, "")
         self.assertEqual(
             stats_1.most_advt_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -4008,10 +4009,10 @@ def test_merge(self):
             os.path.basename(stats_1.most_upd_peer_asn[0].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_upd_peer_asn[0].next_hop, None)
+        self.assertEqual(stats_1.most_upd_peer_asn[0].next_hop, "")
         self.assertEqual(stats_1.most_upd_peer_asn[0].origin_asns, set())
         self.assertEqual(stats_1.most_upd_peer_asn[0].peer_asn, "18106")
-        self.assertEqual(stats_1.most_upd_peer_asn[0].prefix, None)
+        self.assertEqual(stats_1.most_upd_peer_asn[0].prefix, "")
         self.assertEqual(
             stats_1.most_upd_peer_asn[0].timestamp, "20220501.2305"
         )
@@ -4026,10 +4027,10 @@ def test_merge(self):
             os.path.basename(stats_1.most_withd_peer_asn[0].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_withd_peer_asn[0].next_hop, None)
+        self.assertEqual(stats_1.most_withd_peer_asn[0].next_hop, "")
         self.assertEqual(stats_1.most_withd_peer_asn[0].origin_asns, set())
         self.assertEqual(stats_1.most_withd_peer_asn[0].peer_asn, "133210")
-        self.assertEqual(stats_1.most_withd_peer_asn[0].prefix, None)
+        self.assertEqual(stats_1.most_withd_peer_asn[0].prefix, "")
         self.assertEqual(
             stats_1.most_withd_peer_asn[0].timestamp, "20220421.0200"
         )
@@ -4044,11 +4045,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[0].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[0].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[0].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[0].origin_asns, set(["61424", "58143"])
         )
-        self.assertEqual(stats_1.most_origin_asns[0].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[0].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[0].prefix, "5.35.174.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[0].timestamp, "20220421.0200"
@@ -4063,11 +4064,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[1].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[1].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[1].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[1].origin_asns, set(["28198", "262375"])
         )
-        self.assertEqual(stats_1.most_origin_asns[1].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[1].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[1].prefix, "177.131.0.0/21")
         self.assertEqual(
             stats_1.most_origin_asns[1].timestamp, "20220421.0200"
@@ -4082,11 +4083,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[2].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[2].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[2].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[2].origin_asns, set(["396559", "396542"])
         )
-        self.assertEqual(stats_1.most_origin_asns[2].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[2].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[2].prefix, "2620:74:2a::/48")
         self.assertEqual(
             stats_1.most_origin_asns[2].timestamp, "20220421.0200"
@@ -4101,11 +4102,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[3].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[3].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[3].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[3].origin_asns, set(["138346", "134382"])
         )
-        self.assertEqual(stats_1.most_origin_asns[3].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[3].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[3].prefix, "103.88.233.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[3].timestamp, "20220421.0200"
@@ -4120,11 +4121,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[4].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[4].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[4].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[4].origin_asns, set(["37154", "7420"])
         )
-        self.assertEqual(stats_1.most_origin_asns[4].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[4].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[4].prefix, "196.46.192.0/19")
         self.assertEqual(
             stats_1.most_origin_asns[4].timestamp, "20220421.0200"
@@ -4139,11 +4140,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[5].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[5].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[5].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[5].origin_asns, set(["136561", "59362"])
         )
-        self.assertEqual(stats_1.most_origin_asns[5].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[5].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[5].prefix, "123.253.98.0/23")
         self.assertEqual(
             stats_1.most_origin_asns[5].timestamp, "20220421.0200"
@@ -4158,11 +4159,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[6].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[6].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[6].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[6].origin_asns, set(["132608", "17806"])
         )
-        self.assertEqual(stats_1.most_origin_asns[6].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[6].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[6].prefix, "114.130.38.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[6].timestamp, "20220421.0200"
@@ -4177,11 +4178,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[7].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[7].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[7].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[7].origin_asns, set(["136907", "55990"])
         )
-        self.assertEqual(stats_1.most_origin_asns[7].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[7].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[7].prefix, "124.71.250.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[7].timestamp, "20220421.0200"
@@ -4196,11 +4197,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[8].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[8].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[8].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[8].origin_asns, set(["136907", "55990"])
         )
-        self.assertEqual(stats_1.most_origin_asns[8].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[8].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[8].prefix, "139.9.98.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[8].timestamp, "20220421.0200"
@@ -4215,11 +4216,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[9].filename),
             os.path.basename(self.upd_1_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[9].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[9].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[9].origin_asns, set(["7545", "4739"])
         )
-        self.assertEqual(stats_1.most_origin_asns[9].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[9].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[9].prefix, "203.19.254.0/24")
         self.assertEqual(
             stats_1.most_origin_asns[9].timestamp, "20220421.0200"
@@ -4235,11 +4236,11 @@ def test_merge(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[10].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[10].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[10].origin_asns, set(["271204", "266181"])
         )
-        self.assertEqual(stats_1.most_origin_asns[10].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[10].peer_asn, "")
         self.assertEqual(
             stats_1.most_origin_asns[10].prefix, "179.49.190.0/23"
         )
@@ -4256,11 +4257,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[11].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[11].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[11].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[11].origin_asns, set(["7487", "54396"])
         )
-        self.assertEqual(stats_1.most_origin_asns[11].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[11].peer_asn, "")
         self.assertEqual(
             stats_1.most_origin_asns[11].prefix, "205.197.192.0/21"
         )
@@ -4277,11 +4278,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[12].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[12].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[12].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[12].origin_asns, set(["203020", "29802"])
         )
-        self.assertEqual(stats_1.most_origin_asns[12].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[12].peer_asn, "")
         self.assertEqual(
             stats_1.most_origin_asns[12].prefix, "206.123.159.0/24"
         )
@@ -4298,11 +4299,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[13].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[13].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[13].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[13].origin_asns, set(["52000", "19318"])
         )
-        self.assertEqual(stats_1.most_origin_asns[13].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[13].peer_asn, "")
         self.assertEqual(
             stats_1.most_origin_asns[13].prefix, "68.168.210.0/24"
         )
@@ -4319,11 +4320,11 @@ def test_merge(self):
             os.path.basename(stats_1.most_origin_asns[14].filename),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[14].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[14].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[14].origin_asns, set(["55020", "137951"])
         )
-        self.assertEqual(stats_1.most_origin_asns[14].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[14].peer_asn, "")
         self.assertEqual(
             stats_1.most_origin_asns[14].prefix, "156.241.128.0/22"
         )
@@ -4341,11 +4342,11 @@ def test_merge(self):
             os.path.basename(self.upd_2_mrt),
             os.path.basename(self.upd_2_mrt),
         )
-        self.assertEqual(stats_1.most_origin_asns[15].next_hop, None)
+        self.assertEqual(stats_1.most_origin_asns[15].next_hop, "")
         self.assertEqual(
             stats_1.most_origin_asns[15].origin_asns, set(["269208", "268347"])
         )
-        self.assertEqual(stats_1.most_origin_asns[15].peer_asn, None)
+        self.assertEqual(stats_1.most_origin_asns[15].peer_asn, "")
         self.assertEqual(stats_1.most_origin_asns[15].prefix, "2804:610c::/32")
         self.assertEqual(
             stats_1.most_origin_asns[15].timestamp, "20220501.2305"
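One change in this file is a genuine bug fix rather than a formatting or
typing change: the test_is_empty hunk above previously appended the mrt_entry
class object itself, and now appends mrt_entry(), an instance. Both versions
happen to pass at runtime, because the list is non-empty either way, but only
the instance satisfies the list's element type. A simplified sketch (not the
real dnas classes) of how a typed container lets mypy catch exactly this:

    class entry:
        pass

    class stats:
        def __init__(self) -> None:
            # The element type makes appending the wrong thing a type error.
            self.entries: list[entry] = []

    s = stats()
    s.entries.append(entry())  # correct: an instance of entry
    # s.entries.append(entry)  # the old bug: the class object itself;
    # mypy rejects it, since type[entry] is not an entry.
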
@@ -4405,7 +4406,7 @@ def test_merge(self):
         )
         self.assertEqual(stats_1.timestamp, "20220501.2305")
 
-    def test_to_file(self):
+    def test_to_file(self: "test_mrt_stats") -> None:
         self.assertRaises(ValueError, self.upd_1_stats.to_file, None)
         self.assertRaises(TypeError, self.upd_1_stats.to_file, 123)
         self.assertRaises(OSError, self.upd_1_stats.to_file, "/2f98h3fwfh4fwp")
@@ -4419,7 +4420,7 @@ def test_to_file(self):
 
         os.unlink(self.upd_1_test)
 
-    def test_to_json(self):
+    def test_to_json(self: "test_mrt_stats") -> None:
         json_str = self.upd_1_stats.to_json()
         self.assertIsInstance(json_str, str)
         self.assertNotEqual(json_str, "")
@@ -4463,13 +4464,13 @@ def test_to_json(self):
 
         self.assertTrue(stats.equal_to(self.upd_1_stats))
 
-    def test_ts_ymd(self):
+    def test_ts_ymd(self: "test_mrt_stats") -> None:
         self.assertEqual(self.upd_1_stats.ts_ymd(), "20220421")
 
-    def test_ts_ymd_format(self):
+    def test_ts_ymd_format(self: "test_mrt_stats") -> None:
         self.assertEqual(self.upd_1_stats.ts_ymd_format(), "2022/04/21")
 
-    def tearDown(self):
+    def tearDown(self: "test_mrt_stats") -> None:
         os.remove(self.upd_1_mrt)
         os.remove(self.upd_2_mrt)
         os.remove(self.upd_3_mrt)
@@ -4477,5 +4478,5 @@ def tearDown(self):
         os.remove(self.upd_5_mrt)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
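The next file, test_whois.py, exercises whois lookups whose raw output is not
always valid UTF-8; the comments in its hunks call out one record that
decodes as UTF-8 and one that only decodes as ISO-8859-1. A hypothetical
helper showing that decode-with-fallback pattern (an assumption about what
whois.as_lookup does internally, not code taken from dnas):

    def decode_whois(raw: bytes) -> str:
        # Try the strict, common encoding first.
        try:
            return raw.decode("utf-8")
        except UnicodeDecodeError:
            # ISO-8859-1 assigns a character to every byte value, so this
            # second decode can never fail (though it may mis-render text
            # that was really in some other 8-bit encoding).
            return raw.decode("iso-8859-1")
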
diff --git a/dnas/tests/test_whois.py b/dnas/tests/test_whois.py
index d8a7d65..abc7403 100644
--- a/dnas/tests/test_whois.py
+++ b/dnas/tests/test_whois.py
@@ -3,16 +3,13 @@
 import unittest
 
 sys.path.append(
-    os.path.join(
-        os.path.dirname(os.path.realpath(__file__))
-        , "../"
-    )
+    os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")
 )
 
 from dnas.whois import whois
 
-class test_whois(unittest.TestCase):
-    def test_as_lookup(self):
+class test_whois(unittest.TestCase):
+    def test_as_lookup(self: "test_whois") -> None:
         self.assertRaises(ValueError, whois.as_lookup, -123)
         self.assertRaises(TypeError, whois.as_lookup, "abc")
         self.assertIsInstance(whois.as_lookup(41695), str)
@@ -21,8 +18,8 @@ def test_as_lookup(self):
         self.assertEqual("", whois.as_lookup(65000))
 
         # ASN which redirects to a private whois server
-        #self.assertEqual("", whois.as_lookup(8100))
-        #print(whois.as_lookup(8100))
+        # self.assertEqual("", whois.as_lookup(8100))
+        # print(whois.as_lookup(8100))
         # ^ they've gone public / fixed the redirect so no longer a valid test.
 
         # Whois entry which will decode using utf-8
@@ -31,5 +28,6 @@ def test_as_lookup(self):
         # Whois entry which will decode using ISO-8859-1
         self.assertEqual("Linkever", whois.as_lookup(38336))
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/docker/build_dnas.sh b/docker/build_dnas.sh
index 9542034..0101632 100755
--- a/docker/build_dnas.sh
+++ b/docker/build_dnas.sh
@@ -1,8 +1,9 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 set -e
 
 cd /opt/dnas/
+# shellcheck disable=SC1091
 source venv/bin/activate
 git checkout main
 git pull
diff --git a/docker/manual_range.sh b/docker/manual_range.sh
index 0591355..2db24e6 100755
--- a/docker/manual_range.sh
+++ b/docker/manual_range.sh
@@ -32,11 +32,11 @@ EY="${4}"
 EM="${5}"
 ED="${6}"
 
-for year in $(seq -w $SY $EY)
+for year in $(seq -w "$SY" "$EY")
 do
-    for month in $(seq -w $SM $EM)
+    for month in $(seq -w "$SM" "$EM")
     do
-        for day in $(seq -w $SD $ED)
+        for day in $(seq -w "$SD" "$ED")
         do
             echo "doing ${year}${month}${day}:"
             docker-compose run --rm --name tmp_getter --entrypoint /opt/pypy dnas_getter -- \
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..a17d860
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,9 @@
+[mypy]
+check_untyped_defs = True
+disallow_any_explicit = True
+no_implicit_optional = True
+show_error_context = True
+strict_optional = True
+warn_incomplete_stub = True
+warn_return_any = False
+warn_unused_configs = True
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..df41cab
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,60 @@
+[tox]
+envlist =
+    py311
+    lint
+    mypy
+    pytest
+    shellcheck
+
+[testenv]
+skip_install=true
+recreate=true
+deps = -r dnas/requirements.txt
+
+[testenv:lint]
+skip_install=true
+recreate=true
+deps =
+    black
+    isort
+changedir = {toxinidir}
+commands =
+    black --check --diff -l 79 dnas/
+    isort --check-only --df --profile black -l 79 dnas/
+
+[testenv:mypy]
+skip_install=true
+recreate=true
+deps =
+    {[testenv]deps}
+    types-redis
+    types-requests
+changedir = {toxinidir}
+commands = mypy --config-file mypy.ini dnas/
+
+[testenv:pytest]
+skip_install=true
+recreate=true
+deps = {[testenv]deps}
+passenv = *
+changedir = {toxinidir}
+commands =
+    pytest -vvvv dnas/tests/ {posargs}
+
+[testenv:shellcheck]
+skip_install=true
+recreate=false
+changedir = {toxinidir}
+whitelist_externals = /bin/bash
+commands = /bin/bash -c "/usr/bin/env shellcheck docker/*.sh"
+
+[testenv:fixlint]
+skip_install=true
+recreate=true
+deps =
+    black
+    isort
+changedir = {toxinidir}
+commands =
+    black -l 79 dnas/
+    isort --profile black -l 79 dnas/
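The new mypy.ini enables strict_optional and no_implicit_optional, which is
likely what drove the None-to-"" changes throughout the tests above: with
these settings a None default no longer silently widens a parameter or
attribute to an Optional type. A two-line illustration (not dnas code):

    from typing import Optional

    def lookup(asn: Optional[int] = None) -> str:  # OK: Optional is explicit
        return "" if asn is None else str(asn)

    # def lookup(asn: int = None) -> str: ...
    # ^ rejected once no_implicit_optional = True: the None default must be
    #   spelled as Optional[int], or the default changed (e.g. to 0 or "").

With tox.ini in place, running plain `tox` executes the whole environment
list, and `tox -e lint` (or `tox -e fixlint` to rewrite files in place) runs
a single environment, matching the CI workflow change at the top of this
patch.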