diff --git a/ccos/data/asana.py b/ccos/data/asana.py new file mode 100644 index 0000000..60ef14e --- /dev/null +++ b/ccos/data/asana.py @@ -0,0 +1,47 @@ +# Standard library +import logging +import os +import sys + +# Third-party +import asana + +# To see workspace GID, log into Asana and then view: +# https://app.asana.com/api/1.0/workspaces +ASANA_WORKSPACE_GID = "133733285600979" +# To see project GIDs, log into Asana and then view: +# https://app.asana.com/api/1.0/projects +# +# To see "Community Team Tracking" project section GIDs, log into Asana and +# then view: +# https://app.asana.com/api/1.0/projects/1172465506923657/sections +ASANA_SECTION_GID = "1172465506923661" +LOG = logging.root + + +def setup_asana_client(): + LOG.info("Setting up Asana client...") + try: + asana_token = os.environ["ADMIN_ASANA_TOKEN"] + except KeyError: + LOG.critical("missin ADMIN_ASANA_TOKEN environment variable") + sys.exit(1) + asana_client = asana.Client.access_token(asana_token) + asana_client.headers = {"asana-enable": "new_goal_memberships"} + try: + # Perform simple API operation to test authentication + asana_client.workspaces.get_workspace(ASANA_WORKSPACE_GID) + except asana.error.NoAuthorizationError as e: + LOG.critical(f"{e.status} {e.message} (is ADMIN_ASANA_TOKEN valid?)") + sys.exit(1) + LOG.success("done.") + return asana_client + + +def get_asana_team_members(asana_client): + LOG.info("Get Team Members...") + team_members = asana_client.tasks.find_by_section( + ASANA_SECTION_GID, opt_fields=["name", "custom_fields"] + ) + LOG.success("done.") + return team_members diff --git a/ccos/data/get_community_team_data.py b/ccos/data/get_community_team_data.py index bf00d60..88a6b46 100644 --- a/ccos/data/get_community_team_data.py +++ b/ccos/data/get_community_team_data.py @@ -1,43 +1,11 @@ # Standard library -import inspect import logging -import os import sys -# Third-party -import asana +LOG = logging.root -# First-party/Local -import ccos.log -ASANA_WORKSPACE_GID = "133733285600979" -ASANA_PROJECT_GID = "1172465506923661" - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() - - -def setup_asana_client(): - LOG.info("Setting up Asana client...") - try: - asana_token = os.environ["ADMIN_ASANA_TOKEN"] - except KeyError: - LOG.critical("missin ADMIN_ASANA_TOKEN environment variable") - sys.exit(1) - asana_client = asana.Client.access_token(asana_token) - asana_client.headers = {"asana-enable": "new_goal_memberships"} - try: - # Perform simple API operation to test authentication - asana_client.workspaces.get_workspace(ASANA_WORKSPACE_GID) - except asana.error.NoAuthorizationError as e: - LOG.critical(f"{e.status} {e.message} (is ADMIN_ASANA_TOKEN valid?)") - sys.exit(1) - LOG.info("done.") - return asana_client - - -def generate_databag(asana_client): +def generate_databag(team_members): """ This method pulls the team members from Asana and loads them into the databag after a little @@ -71,17 +39,9 @@ def generate_databag(asana_client): ] } """ - - LOG.info("Pulling from Asana and generating databag...") databag = {"projects": [], "community_builders": []} - - members = asana_client.tasks.find_by_section( - ASANA_PROJECT_GID, opt_fields=["name", "custom_fields"] - ) - LOG.info("Team members pulled.") - LOG.info("Processing team members...") - for member in members: + for member in team_members: if member["name"] == "": continue # Sometimes blank names come up role = get_custom_field(member, "Role") @@ 
-115,7 +75,7 @@ def generate_databag(asana_client): ) break - LOG.info("Done.") + LOG.success("done.") return databag @@ -195,8 +155,8 @@ def get_custom_field(task, field_name): return field["text_value"] -def get_community_team_data(asana_client, repo_names): - databag = generate_databag(asana_client) +def get_community_team_data(team_members, repo_names): + databag = generate_databag(team_members) databag = prune_databag(databag) databag = verify_databag(databag, repo_names) databag = sort_databag(databag) diff --git a/ccos/data/get_repo_data.py b/ccos/data/get_repo_data.py index db53579..25ab4d7 100644 --- a/ccos/data/get_repo_data.py +++ b/ccos/data/get_repo_data.py @@ -1,38 +1,13 @@ -#!/usr/bin/env python3 -# vim: set fileencoding=utf-8: - # Standard library -import inspect import logging -import os.path # Third-party import emoji import yaml -from github import Github from github.GithubException import GithubException, UnknownObjectException -# First-party/Local -import ccos.log -from ccos.data.push_data_via_git import GITHUB_ORGANIZATION, GITHUB_TOKEN - CC_METADATA_FILE_NAME = ".cc-metadata.yml" - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() - - -def set_up_github_client(): - LOG.info("Setting up GitHub client...") - github_client = Github(GITHUB_TOKEN) - return github_client - - -def get_cc_organization(github_client): - LOG.info("Getting CC's GitHub organization...") - cc = github_client.get_organization(GITHUB_ORGANIZATION) - return cc +LOG = logging.root def get_repositories(organization): @@ -113,19 +88,15 @@ def get_repo_data_dict(repo_data_list): return {"repos": repo_data_list} -def get_repo_data(): - github_client = set_up_github_client() - cc = get_cc_organization(github_client) - repos = get_repositories(cc) +def get_repo_data(gh_org_cc): + repos = get_repositories(gh_org_cc) repo_data_list = get_repo_data_list(repos) data = get_repo_data_dict(repo_data_list) return data -def get_repo_names(): - github_client = set_up_github_client() - cc = get_cc_organization(github_client) - repos = get_repositories(cc) +def get_repo_names(gh_org_cc): + repos = get_repositories(gh_org_cc) names = [] for repo in repos: names.append(repo.name) diff --git a/ccos/data/push_data_via_git.py b/ccos/data/push_data_via_git.py index dff218c..61e47e5 100644 --- a/ccos/data/push_data_via_git.py +++ b/ccos/data/push_data_via_git.py @@ -1,8 +1,4 @@ -#!/usr/bin/env python3 -# vim: set fileencoding=utf-8: - # Standard library -import inspect import json import logging import os @@ -12,33 +8,25 @@ import git # First-party/Local -import ccos.log +from ccos.gh_utils import GITHUB_ORGANIZATION, get_credentials -GIT_USER_NAME = "CC creativecommons.github.io Bot" -GIT_USER_EMAIL = "cc-creativecommons-github-io-bot@creativecommons.org" - -GITHUB_USERNAME = "cc-creativecommons-github-io-bot" -GITHUB_ORGANIZATION = "creativecommons" GITHUB_REPO_NAME = "creativecommons.github.io-source" - -GITHUB_TOKEN = os.environ["ADMIN_GITHUB_TOKEN"] -GITHUB_REPO_URL_WITH_CREDENTIALS = ( - f"https://{GITHUB_USERNAME}:{GITHUB_TOKEN}" - f"@github.com/{GITHUB_ORGANIZATION}/{GITHUB_REPO_NAME}.git" -) - +GIT_USER_EMAIL = "cc-creativecommons-github-io-bot@creativecommons.org" +GIT_USER_NAME = "CC creativecommons.github.io Bot" JSON_FILE_DIR = "databags" - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() +LOG = logging.root def 
set_up_repo(git_working_dir): + github_username, github_token = get_credentials() + github_repo_url_with_credentials = ( + f"https://{github_username}:{github_token}" + f"@github.com/{GITHUB_ORGANIZATION}/{GITHUB_REPO_NAME}.git" + ) if not os.path.isdir(git_working_dir): LOG.info("Cloning repo...") repo = git.Repo.clone_from( - url=GITHUB_REPO_URL_WITH_CREDENTIALS, to_path=git_working_dir + url=github_repo_url_with_credentials, to_path=git_working_dir ) else: LOG.info("Setting up repo...") diff --git a/ccos/gh_utils.py b/ccos/gh_utils.py index 31d93cd..08a68f6 100644 --- a/ccos/gh_utils.py +++ b/ccos/gh_utils.py @@ -1,5 +1,4 @@ # Standard library -import inspect import logging import os import re @@ -9,15 +8,9 @@ from github import Github from github.GithubException import BadCredentialsException -# First-party/Local -import ccos.log - GITHUB_ORGANIZATION = "creativecommons" GITHUB_USERNAME_DEFAULT = "cc-creativecommons-github-io-bot" - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() +LOG = logging.root def get_credentials(): @@ -37,22 +30,22 @@ def set_up_github_client(): _, github_token = get_credentials() LOG.info("Setting up GitHub client...") github_client = Github(github_token) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") return github_client def get_cc_organization(github_client): LOG.info("Getting CC's GitHub organization...") try: - cc = github_client.get_organization(GITHUB_ORGANIZATION) + gh_org_cc = github_client.get_organization(GITHUB_ORGANIZATION) except BadCredentialsException as e: LOG.critical( f"{e.status} {e.data['message']} (see" f" {e.data['documentation_url']})" ) sys.exit(1) - LOG.log(ccos.log.SUCCESS, "done.") - return cc + LOG.success("done.") + return gh_org_cc def get_team_slug_name(project_name, role): diff --git a/ccos/log.py b/ccos/log.py index 73d46ea..0db082b 100644 --- a/ccos/log.py +++ b/ccos/log.py @@ -9,16 +9,16 @@ class IndentFormatter(logging.Formatter): """ Format the given log messages with proper indentation based on the stack depth of the code invoking the logger. This removes the need for manual - indentation using ``'\t'`` characters. + indentation using tab characters. """ - color_map = { - logging.CRITICAL: 31, # red - logging.ERROR: 31, # red - logging.WARNING: 33, # yellow - SUCCESS: 32, # green - logging.INFO: 34, # blue - logging.DEBUG: 35, # magenta + color_map = { # ............. Level ## Color ## + logging.CRITICAL: 31, # . CRITICAL 50 red 31 + logging.ERROR: 31, # .... ERROR 40 red 31 + logging.WARNING: 33, # .. WARNING 30 yellow 33 + SUCCESS: 32, # .......... SUCCESS 21 green 32 + logging.INFO: 34, # ..... INFO 20 blue 34 + logging.DEBUG: 35, # .... DEBUG 10 magenta 35 } @staticmethod @@ -30,7 +30,6 @@ def identify_cut(filenames): @param filenames: the names of all files from which logs were pushed @return: the index of the filename from which the logger was called """ - lib_string = "lib/python" lib_started = False for index, filename in enumerate(filenames): @@ -44,7 +43,6 @@ def __init__(self): Initialise the formatter with the fixed log format. The format is intentionally minimal to get clean and readable logs. 
""" - fmt = "%(message)s" super().__init__(fmt=fmt) @@ -58,17 +56,19 @@ def update_format(self, record): @param record: the record based on whose level to update the formatting """ prefix = "\u001b[" - color = f"{self.color_map[record.levelno]}m" - bold = "1m" - reset = "0m" + color = f"{prefix}{self.color_map[record.levelno]}m" + bold = f"{prefix}1m" + gray = f"{prefix}1m{prefix}30m" + reset = f"{prefix}0m" self._style._fmt = ( - "%(asctime)s │ " - f"{prefix}{color}%(levelname)-8s{prefix}{reset} │ " + f"%(asctime)s" + f" {gray}│{reset} {color}%(levelname)-8s{reset} {gray}│{reset} " ) if hasattr(record, "function"): self._style._fmt += ( - f"%(indent)s{prefix}{bold}%(function)s{prefix}{reset}: " - "%(message)s" + f"{gray}%(indent)s{reset}" + f"{bold}%(function)s{reset}{gray}:{reset}" + " %(message)s" ) else: self._style._fmt += "%(indent)s%(message)s" @@ -88,7 +88,7 @@ def format(self, record): self.cut = self.identify_cut(filenames) # Inject custom information into the record - record.indent = ". " * (depth - self.baseline + self.manual_push) + record.indent = "." * (depth - self.baseline + self.manual_push) if depth > self.cut: record.function = stack[self.cut].function @@ -109,63 +109,59 @@ def delta_indent(self, delta=1): the value indents the logs and decreasing it de-indents them. @param delta: the number of steps by which to indent/de-indent the logs """ - self.manual_push += delta - if self.manual_push < 0: - self.manual_push = 0 def reset(self): """ Reset the baseline and cut attributes so that the next call to the logger can repopulate them to the new values for the particular file. """ - self.baseline = None self.cut = None self.manual_push = 0 -def set_up_logging(): +def setup_logger(): """ - Configure logging with some first-run configuration. This method must be - called only once from the main process. + Configure RootLogger. This method must be called only once from the main + script (not from modules/libraries included by that script). """ - formatter = IndentFormatter() - - handler = logging.StreamHandler() - handler.setFormatter(formatter) + def log_success_class(self, message, *args, **kwargs): + if self.isEnabledFor(SUCCESS): + # The 'args' below (instead of '*args') is correct + self._log(SUCCESS, message, args, **kwargs) - logging.basicConfig(level=logging.INFO, handlers=(handler,)) - logging.addLevelName(SUCCESS, "SUCCESS") + def log_success_root(message, *args, **kwargs): + logging.log(SUCCESS, message, *args, **kwargs) + def change_indent_class(self, delta=1): + """ + Indent the output of the logger by the given number of steps. If + positive, the indentation increases and if negative, it decreases. + @param delta: the number of steps by which to indent/de-indent the logs + """ + handlers = self.handlers + if len(handlers) > 0: + formatter = handlers[-1].formatter + if isinstance(formatter, IndentFormatter): + formatter.delta_indent(delta) -def reset_handler(): - """ - Reset the formatter on the handler on the root logger. This causes the next - call to the logger can repopulate them based on the new stack in a new - file. 
- """ + logging.addLevelName(SUCCESS, "SUCCESS") + setattr(logging.getLoggerClass(), "success", log_success_class) + setattr(logging, "success", log_success_root) + setattr(logging.getLoggerClass(), "change_indent", change_indent_class) - handlers = logging.root.handlers - if len(handlers) > 0: - formatter = handlers[-1].formatter - if isinstance(formatter, IndentFormatter): - formatter.reset() + formatter = IndentFormatter() + handler = logging.StreamHandler() + handler.setFormatter(formatter) -def change_indent(delta=1): - """ - Indent the output of the logger by the given number of steps. If positive, - the indentation increases and if negative, it decreases. - @param delta: the number of steps by which to indent/de-indent the logs - """ + logger = logging.root + logger.addHandler(handler) + logger.setLevel(logging.INFO) - handlers = logging.root.handlers - if len(handlers) > 0: - formatter = handlers[-1].formatter - if isinstance(formatter, IndentFormatter): - formatter.delta_indent(delta) + return logger -__all__ = ["set_up_logging", "reset_handler", "change_indent", "SUCCESS"] +__all__ = ["setup_logger"] diff --git a/ccos/norm/get_labels.py b/ccos/norm/get_labels.py index 65ae245..1ba987b 100644 --- a/ccos/norm/get_labels.py +++ b/ccos/norm/get_labels.py @@ -1,4 +1,5 @@ # Standard library +import logging from pathlib import Path # Third-party @@ -7,22 +8,31 @@ # First-party/Local from ccos.norm.models import Group, Label +LOG = logging.root -def get_groups(): + +def get_required_label_groups(): """ - Get the list of label groups. - @return: the list of label groups. + Get the list of all the label_groups, at least one label of which is + required to be present on every triaged issue. + @return: the filtered list of label groups that that are required by + definition """ - labels_dict = load_yaml_from_file("labels") - groups = [] + label_groups = [] for group_info in labels_dict["groups"]: group = Group(**group_info) label_names = group_info.pop("labels", []) - groups.append(group) + label_groups.append(group) for label_info in label_names: Label(**label_info, group=group) - return groups + + LOG.info(f"Filtering {len(label_groups)} label_groups...") + required_label_groups = [ + group for group in label_groups if group.is_required + ] + LOG.success(f"done. Required {len(required_label_groups)} label groups.") + return required_label_groups def get_standard_labels(): @@ -30,7 +40,6 @@ def get_standard_labels(): Get the list of standard labels that apply to every repository. @return: the list of standard labels """ - labels_dict = load_yaml_from_file("labels") standard_labels = [] for group_info in labels_dict["groups"]: @@ -51,7 +60,6 @@ def get_repo_specific_labels(): repository, skill levels will be added on top of the standard labels. 
@return: the dict mapping repo names to skill labels """ - skill_group = Group(color="5ff1f5", name="skill") labels_dict = load_yaml_from_file("skills") repo_specific_labels = {} @@ -73,7 +81,6 @@ def get_skill_label_from_name(skill_group, skill_name): @param skill_name: the name of the skill to convert into a label @return: an instance of Label derived from the skill name """ - return Label( name=skill_name.lower(), description=f"Requires proficiency in '{skill_name}'", @@ -90,7 +97,6 @@ def load_yaml_from_file(file_name): @param file_name: the name of the file to load @return: the contents of the YAML file as a Python object """ - file_path = get_datafile_path(file_name) with open(file_path, "r") as file: data = yaml.safe_load(file) @@ -104,7 +110,6 @@ def get_datafile_path(file_name): @param file_name: the name of the file whose path is required @return: the path to the file """ - current_file = Path(__file__).resolve() data_file = current_file.parent.joinpath(f"{file_name}.yml") return data_file @@ -115,10 +120,9 @@ def get_labels(): Get the list of standard and repository-specific labels. @return: the list of standard and repository-specific labels """ - standard_labels = get_standard_labels() repo_specific_labels = get_repo_specific_labels() return standard_labels, repo_specific_labels -__all__ = ["get_labels", "get_groups"] +__all__ = ["get_labels", "get_required_label_groups"] diff --git a/ccos/norm/set_labels.py b/ccos/norm/set_labels.py index 0f3410b..9f62398 100644 --- a/ccos/norm/set_labels.py +++ b/ccos/norm/set_labels.py @@ -1,14 +1,7 @@ # Standard library -import inspect import logging -import os.path -# First-party/Local -import ccos.log - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() +LOG = logging.root def map_repo_to_labels(repo, final_labels, non_destructive=True): @@ -20,54 +13,53 @@ def map_repo_to_labels(repo, final_labels, non_destructive=True): @param final_labels: the list of labels that should be present on the repo @param non_destructive: whether to trim extra labels or preserve them """ - LOG.info("Fetching initial labels...") initial_labels = { label.name.casefold(): label for label in repo.get_labels() } - LOG.log(ccos.log.SUCCESS, f"done. Found {len(initial_labels)} labels.") + LOG.success(f"done. Found {len(initial_labels)} labels.") LOG.info("Parsing final labels...") final_labels = { label.qualified_name.casefold(): label for label in final_labels } - LOG.log(ccos.log.SUCCESS, f"done. Found {len(final_labels)} labels.") + LOG.success(f"done. 
Found {len(final_labels)} labels.") if not non_destructive: LOG.info("Syncing initial labels...") - ccos.log.change_indent(+1) + LOG.change_indent(+1) for initial_label_name, initial_label in initial_labels.items(): LOG.info(f"Syncing '{initial_label_name}'...") - ccos.log.change_indent(+1) + LOG.change_indent(+1) if initial_label_name not in final_labels: LOG.info("Does not exist, deleting...") initial_label.delete() - LOG.log(ccos.log.SUCCESS, "done.") - ccos.log.change_indent(-1) - LOG.log(ccos.log.SUCCESS, "done.") - ccos.log.change_indent(-1) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") + LOG.change_indent(-1) + LOG.success("done.") + LOG.change_indent(-1) + LOG.success("done.") LOG.info("Syncing final labels...") - ccos.log.change_indent(+1) + LOG.change_indent(+1) for final_label_name, final_label in final_labels.items(): LOG.info(f"Syncing '{final_label_name}'...") - ccos.log.change_indent(+1) + LOG.change_indent(+1) if final_label_name not in initial_labels: LOG.info("Did not exist, creating...") repo.create_label(**final_label.api_arguments) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") elif final_label != initial_labels[final_label_name]: LOG.info("Differences found, updating...") initial_label = initial_labels[final_label_name] initial_label.edit(**final_label.api_arguments) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") else: LOG.info("Match found, moving on.") - ccos.log.change_indent(-1) - LOG.log(ccos.log.SUCCESS, "done.") - ccos.log.change_indent(-1) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.change_indent(-1) + LOG.success("done.") + LOG.change_indent(-1) + LOG.success("done.") def set_labels(repos, standard_labels, repo_specific_labels): @@ -75,14 +67,13 @@ def set_labels(repos, standard_labels, repo_specific_labels): Set labels on all repos for the organisation. This is the main entrypoint of the module. """ - for repo in list(repos): LOG.info(f"Getting labels for repo '{repo.name}'...") labels = standard_labels + repo_specific_labels.get(repo.name, []) - LOG.log(ccos.log.SUCCESS, f"done. Found {len(labels)} labels.") + LOG.success(f"done. Found {len(labels)} labels.") LOG.info(f"Syncing labels for repo '{repo.name}'...") map_repo_to_labels(repo, labels) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") __all__ = ["set_labels"] diff --git a/ccos/norm/validate_issues.py b/ccos/norm/validate_issues.py index b7c2780..14413a8 100644 --- a/ccos/norm/validate_issues.py +++ b/ccos/norm/validate_issues.py @@ -1,20 +1,12 @@ # Standard library -import inspect import logging -import os.path # Third-party import yaml -# First-party/Local -import ccos.log - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() - TRIAGE_LABEL = "🚦 status: awaiting triage" LABEL_WORK_REQUIRED_LABEL = "🏷 status: label work required" +LOG = logging.root def dump_invalid_issues(invalid_issues): @@ -22,7 +14,6 @@ def dump_invalid_issues(invalid_issues): Dump all invalid issues in a file in the `tmp/` directory. 
@param invalid_issues: the hash of repos and their list of invalid issues """ - for invalid_issue_list in invalid_issues.values(): for invalid_issue in invalid_issue_list: issue = invalid_issue["issue"] @@ -32,18 +23,17 @@ def dump_invalid_issues(invalid_issues): LOG.info("Dumping issues in a file...") with open("/tmp/invalid_issues.yml", "w") as file: yaml.dump(invalid_issues, file) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") -def are_issue_labels_valid(issue, required_groups): +def are_issue_labels_valid(issue, required_label_groups): """ Check if the given issue is valid based on the labels applied to it. @param issue: the issue whose labels are being validated - @param required_groups: the label groups which must be applied on all - issues + @param required_label_groups: the label groups which must be applied on all + issues @return: whether the issues is or isn't valid, and why """ - labels = issue.get_labels() label_names = {label.name for label in labels} if issue.pull_request: @@ -56,17 +46,18 @@ def are_issue_labels_valid(issue, required_groups): ) return True, None # Issues that haven't been triaged are exempt - missing_groups = [] - for group in required_groups: + missing_label_groups = [] + for group in required_label_groups: required_labels = {label.qualified_name for label in group.labels} if not label_names.intersection(required_labels): - missing_groups.append(group.name) - if missing_groups: + missing_label_groups.append(group.name) + if missing_label_groups: issue.add_to_labels(LABEL_WORK_REQUIRED_LABEL) LOG.info(f"Issue '{issue.title}' has missing labels.") return ( False, - f"Missing labels from groups: {', '.join(missing_groups)}", + "Missing labels from label groups:" + f" {', '.join(missing_label_groups)}", ) else: if LABEL_WORK_REQUIRED_LABEL in label_names: @@ -76,67 +67,53 @@ def are_issue_labels_valid(issue, required_groups): return True, None -def get_invalid_issues_in_repo(repo, required_groups): +def get_invalid_issues_in_repo(repo, required_label_groups): """ Get a list of invalid issues in the given repo with the reason for marking them as such. @param repo: the repo in which to check for the validity of issues - @param required_groups: the label groups which must be applied on all + @param required_label_groups: the label groups which must be applied on all issues @return: a list of invalid issues and their causes """ - LOG.info(f"Getting issues for repo '{repo.name}'...") issues = repo.get_issues(state="open") - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") invalid_issues = [] - ccos.log.change_indent(+1) + LOG.change_indent(+1) for issue in issues: LOG.info(f"Checking labels on '{issue.title}'...") - are_valid, reason = are_issue_labels_valid(issue, required_groups) + are_valid, reason = are_issue_labels_valid( + issue, required_label_groups + ) if not are_valid: invalid_issues.append({"issue": issue, "reason": reason}) - LOG.log(ccos.log.SUCCESS, "done.") - ccos.log.change_indent(-1) + LOG.success("done.") + LOG.change_indent(-1) return invalid_issues -def get_required_groups(groups): - """ - Get the list of all the groups, at least one label of which is required to - be present on every triaged issue. - @param groups: the groups to filter - @return: the filtered list of groups that that are required by definition - """ - - LOG.info(f"Filtering {len(groups)} groups...") - required_groups = [group for group in groups if group.is_required] - LOG.log(ccos.log.SUCCESS, f"done. 
Required {len(required_groups)} groups.") - return required_groups - - -def validate_issues(repos, groups): +def validate_issues(repos, required_label_groups): """ Validate the labels on all issues in all repos for the organisation. This is the main entrypoint of the module. """ - - required_groups = get_required_groups(groups) - invalid_issues = {} - LOG.info("Finding issues with invalid labels...") - ccos.log.change_indent(+1) + invalid_issues = {} + LOG.change_indent(+1) for repo in list(repos): LOG.info(f"Checking issues in repo '{repo.name}'...") invalid_issues[repo.name] = get_invalid_issues_in_repo( - repo, required_groups + repo, required_label_groups ) - LOG.log(ccos.log.SUCCESS, "done.") - ccos.log.change_indent(-1) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") + LOG.change_indent(-1) + LOG.success("done.") + LOG.change_indent(-1) dump_invalid_issues(invalid_issues) + LOG.change_indent(+1) __all__ = ["validate_issues"] diff --git a/ccos/teams/get_community_team_data.py b/ccos/teams/get_community_team_data.py index 1098bba..f1b31d1 100644 --- a/ccos/teams/get_community_team_data.py +++ b/ccos/teams/get_community_team_data.py @@ -1,29 +1,15 @@ -""" -This script pulls the members of the Community Team from the databag in the -OS@CC repository, formats it to match the required structure for setting up -GitHub teams and then syncs the teams to GitHub. - -This file intentionally has an external API identical to that of -`push_data_to_ccos/get_community_team_data.py`. -""" - # Standard library -import inspect import logging -import os.path import re # Third-party import requests -# First-party/Local -import ccos.log - -# Constants should match 'push_data_to_ccos/push_data_via_git.py' +# Constants should match 'ccos/data/push_data_via_git.py' GITHUB_ORGANIZATION = "creativecommons" GITHUB_REPO_NAME = "creativecommons.github.io-source" -# Constants should match 'push_data_to_ccos/sync_data.py' +# Constants should match 'push_data_to_ccos.py' CT_MEMBERS = "community_team_members.json" DATABAG_URL = ( @@ -31,9 +17,7 @@ f"{GITHUB_REPO_NAME}/main/databags/{CT_MEMBERS}" ) -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() +LOG = logging.root def fetch_databag(): @@ -87,8 +71,7 @@ def fetch_databag(): formatted_project["roles"][role].append(member) databag["projects"].append(formatted_project) - LOG.log(ccos.log.SUCCESS, "Done.") - LOG.log(ccos.log.SUCCESS, "Pull successful.") + LOG.success("done.") return databag diff --git a/ccos/teams/set_codeowners.py b/ccos/teams/set_codeowners.py index 1198c48..f9aebc7 100644 --- a/ccos/teams/set_codeowners.py +++ b/ccos/teams/set_codeowners.py @@ -1,9 +1,7 @@ # Standard library import datetime -import inspect import logging import os -import os.path from pathlib import Path from tempfile import TemporaryDirectory @@ -11,7 +9,6 @@ import git # First-party/Local -import ccos.log from ccos.gh_utils import ( GITHUB_ORGANIZATION, get_cc_organization, @@ -20,9 +17,6 @@ ) from ccos.teams.set_teams_on_github import map_role_to_team -GIT_USER_NAME = "CC creativecommons.github.io Bot" -GIT_USER_EMAIL = "cc-creativecommons-github-io-bot@creativecommons.org" -SYNC_BRANCH = "ct_codeowners" CODEOWNERS_TEMPLATE = """\ # https://help.github.com/en/articles/about-code-owners # If you want to match two or more code owners with the same pattern, all the @@ -30,10 +24,10 @@ # line, the pattern matches only the last mentioned code owner. 
* @creativecommons/technology """ - -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() +GIT_USER_NAME = "CC creativecommons.github.io Bot" +GIT_USER_EMAIL = "cc-creativecommons-github-io-bot@creativecommons.org" +LOG = logging.root +SYNC_BRANCH = "ct_codeowners" def create_codeowners_for_data(args, databag): @@ -64,7 +58,7 @@ def create_codeowners_for_data(args, databag): check_and_fix_repo( args, organization, repo_name, teams, temp_dir ) - LOG.log(ccos.log.SUCCESS, "Done") + LOG.success("Done") def set_up_git_user(): @@ -126,7 +120,7 @@ def check_and_fix_repo(args, organization, repo_name, teams, temp_dir): os.makedirs(codeowners_path.parent, exist_ok=True) with open(codeowners_path, "w") as codeowners_file: codeowners_file.write(CODEOWNERS_TEMPLATE) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") teams = filter_valid_teams(gh_repo, teams) fix_required = add_missing_teams(codeowners_path, teams) @@ -137,7 +131,7 @@ def check_and_fix_repo(args, organization, repo_name, teams, temp_dir): push_changes(args, local_repo, branch_name) create_pull_request(args, gh_repo, branch_name) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") def filter_valid_teams(gh_repo, teams): @@ -184,7 +178,7 @@ def push_changes(args, local_repo, branch_name): LOG.info("Pushing to GitHub...") origin = local_repo.remotes.origin origin.push(f"{branch_name}:{branch_name}") - LOG.log(ccos.log.SUCCESS, f"Pushed to {branch_name}.") + LOG.success(f"Pushed to {branch_name}.") def create_pull_request(args, gh_repo, branch_name): @@ -210,7 +204,7 @@ def create_pull_request(args, gh_repo, branch_name): # default branch could be 'main', 'master', 'prod', etc. base=gh_repo.default_branch, ) - LOG.log(ccos.log.SUCCESS, f"PR at {pr.url}.") + LOG.success(f"PR at {pr.url}.") def set_up_repo(clone_url, repo_dir): @@ -258,7 +252,7 @@ def add_missing_teams(codeowners_path, teams): LOG.info("CODEOWNERS is incomplete, populating...") with open(codeowners_path, "w") as codeowners_file: codeowners_file.writelines(new_codeowners) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") return fix_required diff --git a/ccos/teams/set_teams_on_github.py b/ccos/teams/set_teams_on_github.py index 5a20990..62b7d1d 100644 --- a/ccos/teams/set_teams_on_github.py +++ b/ccos/teams/set_teams_on_github.py @@ -1,20 +1,18 @@ # Standard library -import inspect import logging -import os.path import sys # Third-party from github import UnknownObjectException # First-party/Local -import ccos.log from ccos.gh_utils import ( get_cc_organization, get_team_slug_name, set_up_github_client, ) +LOG = logging.root PERMISSIONS = { "Project Contributor": None, "Project Collaborator": "triage", @@ -22,10 +20,6 @@ "Project Maintainer": "maintain", } -log_name = os.path.basename(os.path.splitext(inspect.stack()[-1].filename)[0]) -LOG = logging.getLogger(log_name) -ccos.log.reset_handler() - def create_teams_for_data(databag): client = set_up_github_client() @@ -49,20 +43,20 @@ def create_teams_for_data(databag): LOG.info(f"Finding team for role {role}...") team = map_role_to_team(organization, project_name, role) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") LOG.info(f"Populating repos for team {team.name}...") repos = project["repos"] map_team_to_repos(organization, team, repos, True) set_team_repo_permissions(team, PERMISSIONS[role]) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") LOG.info(f"Populating members for team 
{team.name}...") members = [member["github"] for member in members] map_team_to_members(client, team, members, True) - LOG.log(ccos.log.SUCCESS, "Done.") - LOG.log(ccos.log.SUCCESS, "Done.") - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") + LOG.success("done.") + LOG.success("done.") def map_team_to_members( @@ -153,7 +147,7 @@ def set_team_repo_permissions(team, permission): f"Populating {permission} permission on {repo.full_name} repo...", ) team.set_repo_permission(repo, permission) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") def map_role_to_team(organization, project_name, role, create_if_absent=True): @@ -188,7 +182,7 @@ def map_role_to_team(organization, project_name, role, create_if_absent=True): del properties["privacy"] if properties and properties != {"name": team.name}: team.edit(**properties) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") else: if not create_if_absent: LOG.info("Did not exist, not creating.") @@ -196,5 +190,5 @@ def map_role_to_team(organization, project_name, role, create_if_absent=True): else: LOG.info("Did not exist, creating...") team = organization.create_team(**properties) - LOG.log(ccos.log.SUCCESS, "Done.") + LOG.success("done.") return team diff --git a/move_closed_issues.py b/move_closed_issues.py index 699541e..d75d5fa 100755 --- a/move_closed_issues.py +++ b/move_closed_issues.py @@ -1,11 +1,11 @@ #!/usr/bin/env python3 + """ Move closed Issues out of Backlog and into Active Sprint: Done. """ + # Standard library import argparse -import logging -import os import sys import traceback @@ -16,9 +16,7 @@ import ccos.log from ccos import gh_utils -ccos.log.set_up_logging() -LOG = logging.getLogger(os.path.splitext(os.path.basename(__file__))[0]) -ccos.log.reset_handler() +LOG = ccos.log.setup_logger() class ScriptError(Exception): @@ -87,6 +85,7 @@ def move_cards(args, github, backlog, done): if not args.dryrun: card.delete() LOG.info(" -> removed.") + LOG.success("done.") def main(): diff --git a/normalize_repos.py b/normalize_repos.py index 86eda29..2ea5612 100755 --- a/normalize_repos.py +++ b/normalize_repos.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# vim: set fileencoding=utf-8: """ This script ensures that all active repositories in the creativecommons GitHub @@ -8,8 +7,6 @@ # Standard library import argparse -import logging -import os.path import sys import traceback @@ -21,13 +18,11 @@ import ccos.log from ccos import gh_utils from ccos.norm import branch_protections -from ccos.norm.get_labels import get_groups, get_labels +from ccos.norm.get_labels import get_labels, get_required_label_groups from ccos.norm.set_labels import set_labels from ccos.norm.validate_issues import validate_issues -ccos.log.set_up_logging() -LOG = logging.getLogger(os.path.basename(__file__)) -ccos.log.reset_handler() +LOG = ccos.log.setup_logger() class ScriptError(Exception): @@ -76,8 +71,11 @@ def get_cc_repos(github): def get_select_repos(args): + LOG.info("Get GitHub data") github = gh_utils.set_up_github_client() + LOG.change_indent(-1) repos = list(get_cc_repos(github)) + LOG.change_indent(+1) if args.repos: repos_selected = [] for repo in repos: @@ -98,16 +96,16 @@ def set_repo_labels(args, repos): return LOG.info("Syncing labels...") set_labels(repos, *get_labels()) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") def validate_issue_labels(args, repos): if args.skip_issues: return LOG.info("Checking issues...") - groups = get_groups() - validate_issues(repos, groups) - LOG.log(ccos.log.SUCCESS, "done.") + 
required_label_groups = get_required_label_groups() + validate_issues(repos, required_label_groups) + LOG.success("done.") def is_engineering_project(repo): @@ -167,17 +165,14 @@ def update_branches(args, repos): return LOG.info("Evaluting repositories for branch protections...") for repo in repos: - # TODO: Set up automatic deletion of merged branches update_branch_protection(repo) - LOG.log(ccos.log.SUCCESS, "done.") + LOG.success("done.") def main(): args = setup() LOG.info("Starting normalization") - ccos.log.change_indent(-1) repos = get_select_repos(args) - ccos.log.change_indent(+1) set_repo_labels(args, repos) validate_issue_labels(args, repos) update_branches(args, repos) diff --git a/push_data_to_ccos.py b/push_data_to_ccos.py index 0cdb5df..dac688e 100755 --- a/push_data_to_ccos.py +++ b/push_data_to_ccos.py @@ -3,25 +3,20 @@ # Standard library import argparse -import logging -import os.path import sys import traceback # First-party/Local import ccos.log -from ccos.data.get_community_team_data import ( - get_community_team_data, - setup_asana_client, -) +from ccos import gh_utils +from ccos.data.asana import get_asana_team_members, setup_asana_client +from ccos.data.get_community_team_data import get_community_team_data from ccos.data.get_repo_data import get_repo_data, get_repo_names from ccos.data.push_data_via_git import push_data DAILY_DATABAGS = ["repos", "community_team_members"] -ccos.log.set_up_logging() -LOG = logging.getLogger(os.path.basename(__file__)) -ccos.log.reset_handler() +LOG = ccos.log.setup_logger() class ScriptError(Exception): @@ -51,15 +46,20 @@ def setup(): def main(): args = setup() - asana_client = setup_asana_client() + github_client = gh_utils.set_up_github_client() + gh_org_cc = gh_utils.get_cc_organization(github_client) if "repos" in args.databags: LOG.info("updating repos.json") - push_data(get_repo_data(), "repos.json") + push_data(get_repo_data(gh_org_cc), "repos.json") + LOG.success("done.") if "community_team_members" in args.databags: LOG.info("community_team_members.json") - repo_names = get_repo_names() - community_data = get_community_team_data(asana_client, repo_names) + asana_client = setup_asana_client() + team_members = get_asana_team_members(asana_client) + repo_names = get_repo_names(gh_org_cc) + community_data = get_community_team_data(team_members, repo_names) push_data(community_data, "community_team_members.json") + LOG.success("done.") if __name__ == "__main__": diff --git a/sync_community_teams.py b/sync_community_teams.py index 89135c5..94188d6 100755 --- a/sync_community_teams.py +++ b/sync_community_teams.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 + """ Create GitHub teams for the Community teams and update their membership based on the community_team_members.json Lektor databag. @@ -7,7 +8,6 @@ # Standard library import argparse import logging -import os.path import sys import traceback @@ -17,9 +17,7 @@ from ccos.teams.set_codeowners import create_codeowners_for_data from ccos.teams.set_teams_on_github import create_teams_for_data -ccos.log.set_up_logging() -LOG = logging.getLogger(os.path.splitext(os.path.basename(__file__))[0]) -ccos.log.reset_handler() +LOG = ccos.log.setup_logger() class ScriptError(Exception): @@ -52,6 +50,8 @@ def main(): if args.debug: LOG.setLevel(logging.DEBUG) LOG.debug("Debug mode: no changes will be made to GitHub repositories") + else: + LOG.info("Synchronizing community teams") community_team_data = get_community_team_data() if not args.debug: create_teams_for_data(community_team_data)
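

The central refactor in this patch replaces per-module `logging.getLogger(...)` plus `ccos.log.reset_handler()` boilerplate with a single root-logger configuration: `ccos/log.py` now registers a custom `SUCCESS` level (21, between `INFO` and `WARNING`), attaches a `success()` convenience method to the logger class and the `logging` module, and each entry-point script calls `ccos.log.setup_logger()` once while imported modules simply use `LOG = logging.root`. The following is a minimal standalone sketch of that pattern, assuming a plain `logging.Formatter` in place of the real `IndentFormatter` for brevity; it mirrors the names in `ccos/log.py` but is illustrative, not the module itself.

```python
# Standalone sketch of the root-logger pattern introduced in ccos/log.py.
# Assumption: a plain Formatter stands in for IndentFormatter.
import logging

SUCCESS = 21  # custom level between INFO (20) and WARNING (30)


def setup_logger():
    """Configure the root logger; call once, from the main script only."""

    def log_success_class(self, message, *args, **kwargs):
        if self.isEnabledFor(SUCCESS):
            # Passing 'args' (not '*args') is correct for Logger._log()
            self._log(SUCCESS, message, args, **kwargs)

    def log_success_root(message, *args, **kwargs):
        logging.log(SUCCESS, message, *args, **kwargs)

    # Register the SUCCESS level and the success() helpers
    logging.addLevelName(SUCCESS, "SUCCESS")
    setattr(logging.getLoggerClass(), "success", log_success_class)
    setattr(logging, "success", log_success_root)

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(levelname)-8s %(message)s"))
    logger = logging.root
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    return logger


# In an entry-point script (e.g. push_data_to_ccos.py):
LOG = setup_logger()
LOG.info("Starting...")
LOG.success("done.")

# In any imported module, no setup is needed; share the root logger:
#     LOG = logging.root
#     LOG.success("done.")
```

Because the `success` method is set on the logger class, `logging.root` (a `RootLogger` subclass instance) inherits it, which is what lets every module in this patch call `LOG.success("done.")` without importing `ccos.log`.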