Merge pull request #19 from DefiantLabs/feat/plan-info-in-response-and-semantic-version-github-tag-checking

Feat/plan info in response and semantic version GitHub tag checking
pharr117 authored Sep 10, 2023
2 parents 5aff50b + 1c260a3 commit fa99bb5
Showing 2 changed files with 136 additions and 14 deletions.
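
The net effect of the two changes: each network entry in the API response now carries the full upgrade_plan object alongside the existing fields, and version strings pulled from proposals are resolved against the network repo's GitHub release tags instead of naively taking the longest regex match. A sketch of one entry after this change, with invented field values:

# Illustrative only: approximate shape of one network entry after this PR.
example_entry = {
    "upgrade_name": "v12",
    "source": "current_upgrade_plan",
    "upgrade_block_height": 16985500,
    "upgrade_plan": {  # new in this PR
        "height": "16985500",
        "binaries": {"linux/amd64": "https://example.com/gaiad"},
        "name": "v12",
        "upgraded_client_state": None,
    },
    "estimated_upgrade_time": "2023-09-12T11:00:00",
    "version": "v12.0.1",
}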
149 changes: 135 additions & 14 deletions app.py
@@ -14,6 +14,7 @@
import os
import json
import subprocess
import semantic_version

app = Flask(__name__)

@@ -30,13 +31,13 @@

# Initialize repo vars
# repo_path = ""
# repo_last_download_time = None
# repo_retain_hours = int(os.environ.get('REPO_RETAIN_HOURS', 3))

# Initialize number of workers
num_workers = int(os.environ.get("NUM_WORKERS", 10))

GITHUB_API_BASE_URL = "https://api.github.com/repos/cosmos/chain-registry/contents"
GITHUB_API_URL = "https://api.github.com"
GITHUB_API_BASE_URL = GITHUB_API_URL + "/repos/cosmos/chain-registry/contents"

# these servers have given consistent error responses, this list is used to skip them
SERVER_BLACKLIST = [
@@ -215,6 +216,7 @@ def reorder_data(data):
("source", data.get("source")),
("upgrade_block_height", data.get("upgrade_block_height")),
("estimated_upgrade_time", data.get("estimated_upgrade_time")),
("upgrade_plan", data.get("upgrade_plan")),
("version", data.get("version")),
("error", data.get("error")),
]
@@ -250,7 +252,7 @@ def fetch_endpoints(network, base_url):
return [], []


def fetch_active_upgrade_proposals(rest_url):
def fetch_active_upgrade_proposals(rest_url, network, network_repo_url):
try:
response = requests.get(
f"{rest_url}/cosmos/gov/v1beta1/proposals?proposal_status=2", verify=False
@@ -275,8 +277,15 @@ def fetch_active_upgrade_proposals(rest_url):

# naive regex search on whole message dump
content_dump = json.dumps(content)
versions = SEMANTIC_VERSION_PATTERN.findall(content_dump)
version = max(versions, key=len)

#prefer any version strings found in plan_name first
versions = SEMANTIC_VERSION_PATTERN.findall(plan_name)
if len(versions) == 0:
#fallback to naive search across whole message dump
versions = SEMANTIC_VERSION_PATTERN.findall(content_dump)
if versions:
network_repo_semver_tags = get_network_repo_semver_tags(network, network_repo_url)
version = find_best_semver_for_versions(network, versions, network_repo_semver_tags)
try:
height = int(plan.get("height", 0))
except ValueError:
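
The hunk above changes how a version is extracted from an active proposal: matches in the plan_name win, the whole proposal dump is only a fallback, and the final pick is delegated to the new tag-aware helpers further down. A minimal sketch of the preference order, using an assumed stand-in for SEMANTIC_VERSION_PATTERN (the real pattern is defined earlier in app.py and is not part of this diff):

import re

# Assumed stand-in; app.py's actual SEMANTIC_VERSION_PATTERN is not shown in this diff.
SEMANTIC_VERSION_PATTERN = re.compile(r"v?\d+(?:\.\d+){0,2}")

plan_name = "v12"
content_dump = '{"title": "Upgrade to v12", "description": "supersedes v11.0.3"}'

versions = SEMANTIC_VERSION_PATTERN.findall(plan_name)  # plan_name is preferred...
if len(versions) == 0:
    versions = SEMANTIC_VERSION_PATTERN.findall(content_dump)  # ...the dump is the fallback

print(versions)  # ['v12'] -- the stray 'v11.0.3' in the description never competes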
@@ -294,8 +303,7 @@ def fetch_active_upgrade_proposals(rest_url):
)
raise e


def fetch_current_upgrade_plan(rest_url):
def fetch_current_upgrade_plan(rest_url, network, network_repo_url):
try:
response = requests.get(
f"{rest_url}/cosmos/upgrade/v1beta1/current_plan", verify=False
@@ -315,14 +323,15 @@ def fetch_current_upgrade_plan(rest_url):

if version_matches:
# Find the longest match
version = max(version_matches, key=len)
network_repo_semver_tags = get_network_repo_semver_tags(network, network_repo_url)
version = find_best_semver_for_versions(network, version_matches, network_repo_semver_tags)
try:
height = int(plan.get("height", 0))
except ValueError:
height = 0
return plan_name, version, height
return plan_name, version, height, plan_dump

return None, None, None
return None, None, None, None
except requests.RequestException as e:
print(f"Error received from server {rest_url}: {e}")
raise e
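
fetch_current_upgrade_plan now hands back the serialized plan as a fourth element so the caller can expose it verbatim. For reference, the upgrade-module REST response this function parses looks roughly like the following; field values are invented:

import json

# Invented response body from <rest_url>/cosmos/upgrade/v1beta1/current_plan.
current_plan_response = {
    "plan": {
        "name": "v12",
        "height": "16985500",  # heights come back as strings over REST
        "info": '{"binaries": {"linux/amd64": "https://example.com/gaiad"}}',
        "upgraded_client_state": None,
    }
}

plan = current_plan_response["plan"]
plan_dump = json.dumps(plan)  # roughly what the new fourth tuple element carries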
@@ -332,6 +341,102 @@ def fetch_current_upgrade_plan(rest_url):
)
raise e

def fetch_network_repo_tags(network, network_repo):
if "github.com" in network_repo:
try:
repo_parts = network_repo.split("/")
repo_name = repo_parts[-1]
repo_owner = repo_parts[-2]

if not repo_name or not repo_owner:
print(f"Could not parse github repo name or owner for {network}")
return []

tags_url = GITHUB_API_URL + f"/repos/{repo_owner}/{repo_name}/tags"
tags = requests.get(tags_url)
return list(map(lambda tag: tag["name"], tags.json()))
except Exception as e:
print(f"Could not fetch tags from github for network {network}")
print(e)
return []
else:
print(f"Could not fetch tags from github for network {network}: unsupported repo url {network_repo}")
return []

def get_network_repo_semver_tags(network, network_repo_url):
cached_tags = cache.get(network_repo_url + "_tags")
if not cached_tags:
network_repo_tag_strings = fetch_network_repo_tags(network, network_repo_url)
#cache response from network repo url to reduce api calls to whatever service is hosting the repo
cache.set(network_repo_url + "_tags", network_repo_tag_strings, timeout=600)
else:
network_repo_tag_strings = cached_tags

network_repo_semver_tags = []
for tag in network_repo_tag_strings:
#only use semantic version tags
try:
if tag.startswith("v"):
version = semantic_version.Version(tag[1:])
else:
version = semantic_version.Version(tag)
network_repo_semver_tags.append(version)
except Exception as e:
pass

return network_repo_semver_tags

def find_best_semver_for_versions(network, network_version_strings, network_repo_semver_tags):
if len(network_repo_semver_tags) == 0:
return max(network_version_strings, key=len)

try:
# find version matches in the repo tags
possible_semvers = []
for version_string in network_version_strings:
if version_string.startswith("v"):
version_string = version_string[1:]

contains_minor_version = True
contains_patch_version = True

# our regex captures version strings like "v1" without a minor or patch version, so we need to check for that
# are these conditions good enough or is it missing any cases?
if "." not in version_string:
contains_minor_version = False
contains_patch_version = False
version_string = version_string + ".0.0"
elif version_string.count(".") == 1:
contains_patch_version = False
version_string = version_string + ".0"

current_semver = semantic_version.Version(version_string)

for semver_tag in network_repo_semver_tags:
# find matching tags based on what information we have
if semver_tag.major == current_semver.major:
if contains_minor_version:
if semver_tag.minor == current_semver.minor:
if contains_patch_version:
if semver_tag.patch == current_semver.patch:
possible_semvers.append(semver_tag)
else:
possible_semvers.append(semver_tag)
else:
possible_semvers.append(semver_tag)

# currently just return the highest semver from the list of possible matches. This may be too naive
if len(possible_semvers) != 0:
#sorting is built into the semantic version library
possible_semvers.sort(reverse=True)
semver = possible_semvers[0]
return f"v{semver.major}.{semver.minor}.{semver.patch}"
except Exception as e:
print(f"Failed to parse version strings into semvers for network {network}")
print(e)
return max(network_version_strings, key=len)

return max(network_version_strings, key=len)

def fetch_data_for_network(network, network_type, repo_path):
"""Fetch data for a given network."""
@@ -343,7 +448,7 @@ def fetch_data_for_network(network, network_type, repo_path):
chain_json_path = os.path.join(repo_path, "testnets", network, "chain.json")
else:
raise ValueError(f"Invalid network type: {network_type}")

output_data = {}
err_output_data = {
"network": network,
"type": network_type,
@@ -363,6 +468,8 @@ def fetch_data_for_network(network, network_type, repo_path):
with open(chain_json_path, "r") as file:
data = json.load(file)

network_repo_url = data.get("codebase", {}).get("git_repo", None)

rest_endpoints = data.get("apis", {}).get("rest", [])
rpc_endpoints = data.get("apis", {}).get("rpc", [])

@@ -423,12 +530,13 @@ def fetch_data_for_network(network, network_type, repo_path):
active_upgrade_name,
active_upgrade_version,
active_upgrade_height,
) = fetch_active_upgrade_proposals(current_endpoint)
) = fetch_active_upgrade_proposals(current_endpoint, network, network_repo_url)
(
current_upgrade_name,
current_upgrade_version,
current_upgrade_height,
) = fetch_current_upgrade_plan(current_endpoint)
current_plan_dump,
) = fetch_current_upgrade_plan(current_endpoint, network, network_repo_url)
except:
if index + 1 < len(healthy_rest_endpoints):
print(
@@ -456,13 +564,26 @@ def fetch_data_for_network(network, network_type, repo_path):
if (
current_upgrade_version
and (current_upgrade_height is not None)
and (current_plan_dump is not None)
and current_upgrade_height > latest_block_height
):
upgrade_block_height = current_upgrade_height
upgrade_plan = json.loads(current_plan_dump)
upgrade_version = current_upgrade_version
upgrade_name = current_upgrade_name
source = "current_upgrade_plan"
rest_server_used = current_endpoint
# Extract the relevant information from the parsed JSON
info = json.loads(upgrade_plan.get("info", "{}"))
binaries = info.get("binaries", {})

# Include the expanded information in the output data
output_data["upgrade_plan"] = {
"height": upgrade_plan.get("height", None),
"binaries": binaries,
"name": upgrade_plan.get("name", None),
"upgraded_client_state": upgrade_plan.get("upgraded_client_state", None),
}
break

if not active_upgrade_version and not current_upgrade_version:
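
One subtlety in the hunk above: the plan's info field is itself a JSON-encoded string inside the already-serialized plan, so it takes a second json.loads before the binaries map is usable. A minimal illustration with an invented plan dump:

import json

# Invented; in app.py this string comes back from fetch_current_upgrade_plan.
current_plan_dump = json.dumps({
    "name": "v12",
    "height": "16985500",
    "info": '{"binaries": {"linux/amd64": "https://example.com/gaiad?checksum=sha256:abc123"}}',
    "upgraded_client_state": None,
})

upgrade_plan = json.loads(current_plan_dump)       # first decode: the plan itself
info = json.loads(upgrade_plan.get("info", "{}"))  # second decode: "info" is nested JSON
print(info.get("binaries", {})["linux/amd64"])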
@@ -505,6 +626,7 @@ def fetch_data_for_network(network, network_type, repo_path):
"upgrade_name": upgrade_name,
"source": source,
"upgrade_block_height": upgrade_block_height,
"upgrade_plan": output_data.get("upgrade_plan", None),
"estimated_upgrade_time": estimated_upgrade_time,
"version": upgrade_version,
}
@@ -515,7 +637,6 @@
# periodic cache update
def update_data():
"""Function to periodically update the data for mainnets and testnets."""
global repo_last_download_time

while True:
start_time = datetime.now() # Capture the start time
1 change: 1 addition & 0 deletions requirements.txt
@@ -3,3 +3,4 @@ Flask-Caching==2.0.2
requests==2.26.0
urllib3==1.26.7
gunicorn==20.1.0
semantic-version==2.10.0
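
The one new dependency, semantic-version, supplies the parsing and ordering used throughout the tag matching; Version objects compare numerically rather than lexicographically. A quick illustration:

import semantic_version

a = semantic_version.Version("1.2.1")
b = semantic_version.Version("1.10.0")

print(a < b)        # True: 10 > 2 numerically, though "1.10.0" < "1.2.1" as plain strings
print(max([a, b]))  # 1.10.0 -- sorting and max work directly, as the new helpers rely on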
