diff --git a/.github/actions/publish-from-template/render_template.py b/.github/actions/publish-from-template/render_template.py
index 9c142152d..d1c11750f 100644
--- a/.github/actions/publish-from-template/render_template.py
+++ b/.github/actions/publish-from-template/render_template.py
@@ -1,6 +1,6 @@
 import os
-import pathlib
 import sys
+from pathlib import Path

 import jinja2

@@ -13,5 +13,5 @@ def main(template_path):

 if __name__ == "__main__":
-    template_path = pathlib.Path(sys.argv[1])
+    template_path = Path(sys.argv[1])
     main(template_path)
diff --git a/pyproject.toml b/pyproject.toml
index a1c745ae2..4c9cf08ac 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -97,9 +97,15 @@ Source = "https://github.com/nebari-dev/nebari"
 nebari = "_nebari.cli.main:app"

 [tool.ruff]
+select = [
+    "E",
+    "F",
+    "PTH",
+]
 ignore = [
     "E501",  # Line too long
     "F821",  # Undefined name
+    "PTH123",  # open() should be replaced by Path.open()
 ]
 extend-exclude = [
     "src/_nebari/template",
diff --git a/scripts/aws-force-destroy.py b/scripts/aws-force-destroy.py
index 83d1afbf9..f58d29248 100644
--- a/scripts/aws-force-destroy.py
+++ b/scripts/aws-force-destroy.py
@@ -1,7 +1,7 @@
 import argparse
 import logging
-import pathlib
 import time
+from pathlib import Path

 from _nebari.utils import check_cloud_credentials, load_yaml, timer

@@ -17,7 +17,7 @@ def main():

 def handle_force_destroy(args):
-    config_filename = pathlib.Path(args.config)
+    config_filename = Path(args.config)
     if not config_filename.is_file():
         raise ValueError(
             f"passed in configuration filename={config_filename} must exist"
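The same handful of substitutions recurs through the rest of this patch. As a cheat sheet (illustrative only, not part of the diff), the retired os/os.path idioms map onto pathlib like so:

```python
import tempfile
from pathlib import Path

p = Path(tempfile.gettempdir()) / "terraform" / "1.0.5"  # os.path.join(...)
p.mkdir(parents=True, exist_ok=True)                     # os.makedirs(p, exist_ok=True)
print(p.is_file(), p.exists(), p.is_dir())               # os.path.isfile / exists / isdir
p.chmod(0o755)                                           # os.chmod(p, 0o755)
print(Path.cwd())                                        # os.getcwd()
print(Path("/projects/myproj").name)                     # os.path.basename(...)
(p / "marker").touch()
(p / "marker").unlink()                                  # os.remove(...)
```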
""" - chart_dir = "helm_charts" - os.makedirs(chart_dir, exist_ok=True) + chart_dir = Path("helm_charts") + chart_dir.mkdir(parents=True, exist_ok=True) + os.chdir(chart_dir) for chart_name, chart_metadata in tqdm( @@ -184,14 +185,14 @@ def pull_helm_chart(chart_index: dict, skip_charts: list) -> None: f"helm pull {chart_name} --version {chart_version} --repo {chart_repository} --untar" ) - chart_filename = f"{chart_name}-{chart_version}.tgz" - if not os.path.exists(chart_filename): + chart_filename = Path(f"{chart_name}-{chart_version}.tgz") + if not chart_filename.exists(): raise ValueError( f"Could not find {chart_name}:{chart_version} directory in {chart_dir}." ) print("All charts downloaded successfully!") - # shutil.rmtree(pathlib.Path(os.getcwd()).parent / chart_dir) + # shutil.rmtree(Path(os.getcwd()).parent / chart_dir) def add_workflow_job_summary(chart_index: dict): diff --git a/scripts/keycloak-export.py b/scripts/keycloak-export.py index 878e1d6ff..486c93e94 100644 --- a/scripts/keycloak-export.py +++ b/scripts/keycloak-export.py @@ -1,8 +1,8 @@ import argparse import json import logging -import pathlib import sys +from pathlib import Path from _nebari.keycloak import get_keycloak_admin_from_config @@ -18,7 +18,7 @@ def main(): def handle_keycloak_export(args): - config_filename = pathlib.Path(args.config) + config_filename = Path(args.config) if not config_filename.is_file(): raise ValueError( f"passed in configuration filename={config_filename} must exist" diff --git a/src/_nebari/cli/main.py b/src/_nebari/cli/main.py index 7702a8517..cbc0f03ef 100644 --- a/src/_nebari/cli/main.py +++ b/src/_nebari/cli/main.py @@ -49,6 +49,34 @@ DEV_COMMAND_MSG = "Development tools and advanced features." +def path_callback(value: str) -> Path: + return Path(value).expanduser().resolve() + + +def config_path_callback(value: str) -> Path: + value = path_callback(value) + if not value.is_file(): + raise ValueError(f"Passed configuration path {value} does not exist!") + return value + + +CONFIG_PATH_OPTION: Path = typer.Option( + ..., + "--config", + "-c", + help="nebari configuration yaml file path, please pass in as -c/--config flag", + callback=config_path_callback, +) + +OUTPUT_PATH_OPTION: Path = typer.Option( + Path.cwd(), + "-o", + "--output", + help="output directory", + callback=path_callback, +) + + class OrderCommands(TyperGroup): def list_commands(self, ctx: Context): """Return list of commands in the order appear.""" @@ -193,12 +221,7 @@ def init( @app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) def validate( - config: str = typer.Option( - ..., - "--config", - "-c", - help="nebari configuration yaml file path, please pass in as -c/--config flag", - ), + config_path=CONFIG_PATH_OPTION, enable_commenting: bool = typer.Option( False, "--enable-commenting", help="Toggle PR commenting on GitHub Actions" ), @@ -206,13 +229,7 @@ def validate( """ Validate the values in the [purple]nebari-config.yaml[/purple] file are acceptable. """ - config_filename = Path(config) - if not config_filename.is_file(): - raise ValueError( - f"Passed in configuration filename={config_filename} must exist." 
@@ -193,12 +221,7 @@ def init(

 @app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME)
 def validate(
-    config: str = typer.Option(
-        ...,
-        "--config",
-        "-c",
-        help="nebari configuration yaml file path, please pass in as -c/--config flag",
-    ),
+    config_path=CONFIG_PATH_OPTION,
     enable_commenting: bool = typer.Option(
         False, "--enable-commenting", help="Toggle PR commenting on GitHub Actions"
     ),
 ):
     """
     Validate the values in the [purple]nebari-config.yaml[/purple] file are acceptable.
     """
-    config_filename = Path(config)
-    if not config_filename.is_file():
-        raise ValueError(
-            f"Passed in configuration filename={config_filename} must exist."
-        )
-
-    config = load_yaml(config_filename)
+    config = load_yaml(config_path)

     if enable_commenting:
         # for PR's only
@@ -225,18 +242,8 @@

 @app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME)
 def render(
-    output: str = typer.Option(
-        "./",
-        "-o",
-        "--output",
-        help="output directory",
-    ),
-    config: str = typer.Option(
-        ...,
-        "-c",
-        "--config",
-        help="nebari configuration yaml file path",
-    ),
+    output_path=OUTPUT_PATH_OPTION,
+    config_path=CONFIG_PATH_OPTION,
     dry_run: bool = typer.Option(
         False,
         "--dry-run",
@@ -246,34 +253,17 @@
     """
     Dynamically render the Terraform scripts and other files from your [purple]nebari-config.yaml[/purple] file.
     """
-    config_filename = Path(config)
-
-    if not config_filename.is_file():
-        raise ValueError(
-            f"passed in configuration filename={config_filename} must exist"
-        )
-
-    config_yaml = load_yaml(config_filename)
+    config = load_yaml(config_path)

-    verify(config_yaml)
+    verify(config)

-    render_template(output, config, dry_run=dry_run)
+    render_template(output_path, config_path, dry_run=dry_run)


 @app.command()
 def deploy(
-    config: str = typer.Option(
-        ...,
-        "--config",
-        "-c",
-        help="nebari configuration yaml file path",
-    ),
-    output: str = typer.Option(
-        "./",
-        "-o",
-        "--output",
-        help="output directory",
-    ),
+    config_path=CONFIG_PATH_OPTION,
+    output_path=OUTPUT_PATH_OPTION,
     dns_provider: str = typer.Option(
         False,
         "--dns-provider",
@@ -308,22 +298,15 @@
     """
     Deploy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file.
     """
-    config_filename = Path(config)
-
-    if not config_filename.is_file():
-        raise ValueError(
-            f"passed in configuration filename={config_filename} must exist"
-        )
+    config = load_yaml(config_path)

-    config_yaml = load_yaml(config_filename)
-
-    verify(config_yaml)
+    verify(config)

     if not disable_render:
-        render_template(output, config)
+        render_template(output_path, config_path)

     deploy_configuration(
-        config_yaml,
+        config,
         dns_provider=dns_provider,
         dns_auto_provision=dns_auto_provision,
         disable_prompt=disable_prompt,
@@ -334,15 +317,8 @@

 @app.command()
 def destroy(
-    config: str = typer.Option(
-        ..., "-c", "--config", help="nebari configuration file path"
-    ),
-    output: str = typer.Option(
-        "./",
-        "-o",
-        "--output",
-        help="output directory",
-    ),
+    config_path=CONFIG_PATH_OPTION,
+    output_path=OUTPUT_PATH_OPTION,
     disable_render: bool = typer.Option(
         False,
         "--disable-render",
@@ -358,21 +334,15 @@
     Destroy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file.
""" - def _run_destroy(config=config, disable_render=disable_render): - config_filename = Path(config) - if not config_filename.is_file(): - raise ValueError( - f"passed in configuration filename={config_filename} must exist" - ) - - config_yaml = load_yaml(config_filename) + def _run_destroy(config_path=config_path, disable_render=disable_render): + config = load_yaml(config_path) - verify(config_yaml) + verify(config) if not disable_render: - render_template(output, config) + render_template(output_path, config_path) - destroy_configuration(config_yaml) + destroy_configuration(config) if disable_prompt: _run_destroy() @@ -384,12 +354,7 @@ def _run_destroy(config=config, disable_render=disable_render): @app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) def upgrade( - config: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ), + config_path=CONFIG_PATH_OPTION, attempt_fixes: bool = typer.Option( False, "--attempt-fixes", @@ -403,29 +368,13 @@ def upgrade( See the project [green]RELEASE.md[/green] for details. """ - config_filename = Path(config) - if not config_filename.is_file(): - raise ValueError( - f"passed in configuration filename={config_filename} must exist" - ) - - do_upgrade(config_filename, attempt_fixes=attempt_fixes) + do_upgrade(config_path, attempt_fixes=attempt_fixes) @app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) def support( - config_filename: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ), - output: str = typer.Option( - "./nebari-support-logs.zip", - "-o", - "--output", - help="output filename", - ), + config_path=CONFIG_PATH_OPTION, + output_path=OUTPUT_PATH_OPTION, ): """ Support tool to write all Kubernetes logs locally and compress them into a zip file. 
@@ -437,7 +386,7 @@ def support(

     v1 = client.CoreV1Api()

-    namespace = get_config_namespace(config=config_filename)
+    namespace = get_config_namespace(config_path)

     pods = v1.list_namespaced_pod(namespace=namespace)

@@ -476,20 +425,14 @@ def support(
                 file.write("%s not available" % pod.metadata.name)
             raise e

-    with ZipFile(output, "w") as zip:
+    with ZipFile(output_path, "w") as zip:
         for file in list(Path(f"./log/{namespace}").glob("*.txt")):
             print(file)
             zip.write(file)


-def get_config_namespace(config):
-    config_filename = Path(config)
-    if not config_filename.is_file():
-        raise ValueError(
-            f"passed in configuration filename={config_filename} must exist"
-        )
-
-    with config_filename.open() as f:
+def get_config_namespace(config_path):
+    with open(config_path) as f:
         config = yaml.safe_load(f.read())

     return config["namespace"]
diff --git a/src/_nebari/deploy.py b/src/_nebari/deploy.py
index d7af42eb8..8dbd62013 100644
--- a/src/_nebari/deploy.py
+++ b/src/_nebari/deploy.py
@@ -1,7 +1,7 @@
 import logging
-import os
 import subprocess
 import textwrap
+from pathlib import Path

 from _nebari.provider import terraform
 from _nebari.provider.dns.cloudflare import update_record
@@ -17,14 +17,14 @@

 def provision_01_terraform_state(stage_outputs, config):
-    directory = "stages/01-terraform-state"
+    directory = Path("stages/01-terraform-state")

     if config["provider"] in {"existing", "local"}:
         stage_outputs[directory] = {}
     else:
         stage_outputs[directory] = terraform.deploy(
             terraform_import=True,
-            directory=os.path.join(directory, config["provider"]),
+            directory=directory / config["provider"],
             input_vars=input_vars.stage_01_terraform_state(stage_outputs, config),
             state_imports=state_imports.stage_01_terraform_state(stage_outputs, config),
         )
@@ -48,7 +48,7 @@ def provision_02_infrastructure(stage_outputs, config, disable_checks=False):
     directory = "stages/02-infrastructure"

     stage_outputs[directory] = terraform.deploy(
-        os.path.join(directory, config["provider"]),
+        Path(directory) / config["provider"],
         input_vars=input_vars.stage_02_infrastructure(stage_outputs, config),
     )
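One consistency point in provision_01_terraform_state: `directory` is both a stage_outputs key and a path fragment, and it just became a Path, while destroy.py (below) still indexes stage_outputs with plain strings. str and Path never compare equal as dict keys, so the key types have to stay uniform — illustration only, not from the patch:

```python
from pathlib import Path

stage_outputs = {Path("stages/01-terraform-state"): {}}
print("stages/01-terraform-state" in stage_outputs)        # False
print(Path("stages/01-terraform-state") in stage_outputs)  # True
```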
diff --git a/src/_nebari/destroy.py b/src/_nebari/destroy.py
index 736085530..eefd3eefc 100644
--- a/src/_nebari/destroy.py
+++ b/src/_nebari/destroy.py
@@ -1,6 +1,6 @@
 import functools
 import logging
-import os
+from pathlib import Path

 from _nebari.provider import terraform
 from _nebari.stages import input_vars, state_imports
@@ -30,13 +30,13 @@ def gather_stage_outputs(config):
         and config["terraform_state"]["type"] == "remote"
     ):
         stage_outputs["stages/01-terraform-state"] = _terraform_init_output(
-            directory=os.path.join("stages/01-terraform-state", config["provider"]),
+            directory=Path("stages/01-terraform-state") / config["provider"],
             input_vars=input_vars.stage_01_terraform_state(stage_outputs, config),
             state_imports=state_imports.stage_01_terraform_state(stage_outputs, config),
         )

     stage_outputs["stages/02-infrastructure"] = _terraform_init_output(
-        directory=os.path.join("stages/02-infrastructure", config["provider"]),
+        directory=Path("stages/02-infrastructure") / config["provider"],
         input_vars=input_vars.stage_02_infrastructure(stage_outputs, config),
     )
@@ -146,7 +146,7 @@ def _terraform_destroy(ignore_errors=False, terraform_apply=False, **kwargs):
     )

     status["stages/02-infrastructure"] = _terraform_destroy(
-        directory=os.path.join("stages/02-infrastructure", config["provider"]),
+        directory=Path("stages/02-infrastructure") / config["provider"],
         input_vars=input_vars.stage_02_infrastructure(stage_outputs, config),
         ignore_errors=True,
     )
@@ -159,7 +159,7 @@ def _terraform_destroy(ignore_errors=False, terraform_apply=False, **kwargs):
         # acl and force_destroy do not import properly
         # and only get refreshed properly with an apply
         terraform_apply=True,
-        directory=os.path.join("stages/01-terraform-state", config["provider"]),
+        directory=Path("stages/01-terraform-state") / config["provider"],
         input_vars=input_vars.stage_01_terraform_state(stage_outputs, config),
         ignore_errors=True,
     )
diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py
index f00288baa..69e9fb1b2 100644
--- a/src/_nebari/initialize.py
+++ b/src/_nebari/initialize.py
@@ -5,6 +5,7 @@
 import secrets
 import string
 import tempfile
+from pathlib import Path

 import requests

@@ -375,12 +376,10 @@ def render_config(
     )

     # Save default password to file
-    default_password_filename = os.path.join(
-        tempfile.gettempdir(), "NEBARI_DEFAULT_PASSWORD"
-    )
+    default_password_filename = Path(tempfile.gettempdir()) / "NEBARI_DEFAULT_PASSWORD"
     with open(default_password_filename, "w") as f:
         f.write(default_password)
-    os.chmod(default_password_filename, 0o700)
+    default_password_filename.chmod(0o700)

     config["theme"]["jupyterhub"]["hub_title"] = f"Nebari - { project_name }"
     config["theme"]["jupyterhub"][
diff --git a/src/_nebari/provider/cicd/linter.py b/src/_nebari/provider/cicd/linter.py
index 02d06743f..45cdb9f6a 100644
--- a/src/_nebari/provider/cicd/linter.py
+++ b/src/_nebari/provider/cicd/linter.py
@@ -1,7 +1,7 @@
 import json
 import os
-import pathlib
 import textwrap
+from pathlib import Path

 import requests

@@ -32,7 +32,7 @@ def parse_validation(message):

 def generate_lint_message(config):
     # prep for linting
-    pr_config = pathlib.Path("nebari-config.yaml")
+    pr_config = Path("nebari-config.yaml")

     # lint/validate nebari-config.yaml
     all_pass, messages, validate_code = nebari_validate(config)
diff --git a/src/_nebari/provider/git.py b/src/_nebari/provider/git.py
index ea7be4722..e731ad5d5 100644
--- a/src/_nebari/provider/git.py
+++ b/src/_nebari/provider/git.py
@@ -1,17 +1,18 @@
 import configparser
 import os
 import subprocess
+from pathlib import Path

 from _nebari.utils import change_directory


 def is_git_repo(path=None):
-    path = path or os.getcwd()
+    path = path or Path.cwd()
     return ".git" in os.listdir(path)


 def initialize_git(path=None):
-    path = path or os.getcwd()
+    path = path or Path.cwd()
     with change_directory(path):
         subprocess.check_output(["git", "init"])
         # Ensure initial branch is called main
@@ -19,10 +20,10 @@

 def add_git_remote(remote_path, path=None, remote_name="origin"):
-    path = path or os.getcwd()
+    path = path or Path.cwd()

     c = configparser.ConfigParser()
-    with open(os.path.join(path, ".git/config")) as f:
+    with open(path / ".git/config") as f:
         c.read_file(f)
     if f'remote "{remote_name}"' in c:
         if c[f'remote "{remote_name}"']["url"] == remote_path:
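A general note on why so few call sites needed touching: since PEP 519 (Python 3.6), anything that accepts a string path — open(), shutil, zipfile, subprocess, os.chdir, os.listdir — also accepts a Path via os.fspath(). The one thing that does not come for free is the `/` join itself: `path / ".git/config"` in add_git_remote assumes callers now pass a Path (a plain-str `path` on the left of `/` against another str would raise TypeError). Illustration, not from the patch:

```python
import os
from pathlib import Path

p = Path("nebari-config.yaml")
print(os.fspath(p))   # "nebari-config.yaml" — what open()/shutil/zipfile use internally
if p.exists():        # Path drops in anywhere a str path is accepted (PEP 519)
    with open(p) as f:
        print(f.readline())
```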
f"https://releases.hashicorp.com/terraform/{version}/terraform_{version}_{os_mapping[sys.platform]}_{architecture_mapping[platform.machine()]}.zip" - filename_directory = os.path.join(tempfile.gettempdir(), "terraform", version) - filename_path = os.path.join(filename_directory, "terraform") + filename_directory = Path(tempfile.gettempdir()) / "terraform" / version + filename_path = filename_directory / "terraform" - if not os.path.isfile(filename_path): + if not filename_path.is_file(): logger.info( f"downloading and extracting terraform binary from url={download_url} to path={filename_path}" ) @@ -110,7 +110,7 @@ def download_terraform_binary(version=constants.TERRAFORM_VERSION): download_file = zipfile.ZipFile(bytes_io) download_file.extract("terraform", filename_directory) - os.chmod(filename_path, 0o555) + filename_path.chmod(0o555) return filename_path @@ -216,12 +216,12 @@ def destroy(directory=None, targets=None, var_files=None): def rm_local_state(directory=None): logger.info(f"rm local state file terraform.tfstate directory={directory}") - tfstate_path = "terraform.tfstate" + tfstate_path = Path("terraform.tfstate") if directory: - tfstate_path = os.path.join(directory, tfstate_path) + tfstate_path = directory / tfstate_path - if os.path.isfile(tfstate_path): - os.remove(tfstate_path) + if tfstate_path.is_file(): + tfstate_path.unlink() # ========== Terraform JSON ============ diff --git a/src/_nebari/render.py b/src/_nebari/render.py index 14dc67284..c25aa9c2e 100644 --- a/src/_nebari/render.py +++ b/src/_nebari/render.py @@ -2,9 +2,9 @@ import hashlib import json import os -import pathlib import shutil import sys +from pathlib import Path from typing import Dict, List import yaml @@ -17,21 +17,19 @@ from _nebari.provider.cicd.github import gen_nebari_linter, gen_nebari_ops from _nebari.provider.cicd.gitlab import gen_gitlab_ci from _nebari.stages import tf_objects +from _nebari.utils import is_relative_to def render_template(output_directory, config_filename, dry_run=False): # get directory for nebari templates - template_directory = pathlib.Path(_nebari.__file__).parent / "template" + template_directory = Path(_nebari.__file__).parent / "template" # would be nice to remove assumption that input directory # is in local filesystem and a directory - template_directory = pathlib.Path(template_directory) if not template_directory.is_dir(): raise ValueError(f"template directory={template_directory} is not a directory") - output_directory = pathlib.Path(output_directory).resolve() - - if output_directory == str(pathlib.Path.home()): + if output_directory == Path.home(): print("ERROR: Deploying Nebari in home directory is not advised!") sys.exit(1) @@ -39,13 +37,12 @@ def render_template(output_directory, config_filename, dry_run=False): # into it in remove_existing_renders output_directory.mkdir(exist_ok=True, parents=True) - config_filename = pathlib.Path(config_filename) if not config_filename.is_file(): raise ValueError( f"cookiecutter configuration={config_filename} is not filename" ) - with config_filename.open() as f: + with open(config_filename) as f: yaml = YAML(typ="safe", pure=True) config = yaml.load(f) @@ -74,13 +71,13 @@ def render_template(output_directory, config_filename, dry_run=False): ): directories.append(f"stages/01-terraform-state/{config['provider']}") - source_dirs = [os.path.join(str(template_directory), _) for _ in directories] - output_dirs = [os.path.join(str(output_directory), _) for _ in directories] + source_dirs = [template_directory / Path(directory) 
diff --git a/src/_nebari/render.py b/src/_nebari/render.py
index 14dc67284..c25aa9c2e 100644
--- a/src/_nebari/render.py
+++ b/src/_nebari/render.py
@@ -2,9 +2,9 @@
 import hashlib
 import json
 import os
-import pathlib
 import shutil
 import sys
+from pathlib import Path
 from typing import Dict, List

 import yaml
@@ -17,21 +17,19 @@
 from _nebari.provider.cicd.github import gen_nebari_linter, gen_nebari_ops
 from _nebari.provider.cicd.gitlab import gen_gitlab_ci
 from _nebari.stages import tf_objects
+from _nebari.utils import is_relative_to


 def render_template(output_directory, config_filename, dry_run=False):
     # get directory for nebari templates
-    template_directory = pathlib.Path(_nebari.__file__).parent / "template"
+    template_directory = Path(_nebari.__file__).parent / "template"

     # would be nice to remove assumption that input directory
     # is in local filesystem and a directory
-    template_directory = pathlib.Path(template_directory)
     if not template_directory.is_dir():
         raise ValueError(f"template directory={template_directory} is not a directory")

-    output_directory = pathlib.Path(output_directory).resolve()
-
-    if output_directory == str(pathlib.Path.home()):
+    if output_directory == Path.home():
         print("ERROR: Deploying Nebari in home directory is not advised!")
         sys.exit(1)

@@ -39,13 +37,12 @@ def render_template(output_directory, config_filename, dry_run=False):
     # into it in remove_existing_renders
     output_directory.mkdir(exist_ok=True, parents=True)

-    config_filename = pathlib.Path(config_filename)
     if not config_filename.is_file():
         raise ValueError(
             f"cookiecutter configuration={config_filename} is not filename"
         )

-    with config_filename.open() as f:
+    with open(config_filename) as f:
         yaml = YAML(typ="safe", pure=True)
         config = yaml.load(f)

@@ -74,13 +71,13 @@ def render_template(output_directory, config_filename, dry_run=False):
     ):
         directories.append(f"stages/01-terraform-state/{config['provider']}")

-    source_dirs = [os.path.join(str(template_directory), _) for _ in directories]
-    output_dirs = [os.path.join(str(output_directory), _) for _ in directories]
+    source_dirs = [template_directory / Path(directory) for directory in directories]
+    output_dirs = [output_directory / Path(directory) for directory in directories]

     new, untracked, updated, deleted = inspect_files(
         source_dirs,
         output_dirs,
-        source_base_dir=str(template_directory),
-        output_base_dir=str(output_directory),
+        source_base_dir=template_directory,
+        output_base_dir=output_directory,
         ignore_filenames=[
             "terraform.tfstate",
             ".terraform.lock.hcl",
@@ -97,17 +94,17 @@ def render_template(output_directory, config_filename, dry_run=False):
     if new:
         table = Table("The following files will be created:", style="deep_sky_blue1")
         for filename in sorted(new):
-            table.add_row(filename, style="green")
+            table.add_row(str(filename), style="green")
         print(table)
     if updated:
         table = Table("The following files will be updated:", style="deep_sky_blue1")
         for filename in sorted(updated):
-            table.add_row(filename, style="green")
+            table.add_row(str(filename), style="green")
         print(table)
     if deleted:
         table = Table("The following files will be deleted:", style="deep_sky_blue1")
         for filename in sorted(deleted):
-            table.add_row(filename, style="green")
+            table.add_row(str(filename), style="green")
         print(table)
     if untracked:
         table = Table(
@@ -115,35 +112,34 @@ def render_template(output_directory, config_filename, dry_run=False):
             style="deep_sky_blue1",
         )
         for filename in sorted(updated):
-            table.add_row(filename, style="green")
+            table.add_row(str(filename), style="green")
         print(table)

     if dry_run:
         print("dry-run enabled no files will be created, updated, or deleted")
     else:
         for filename in new | updated:
-            input_filename = os.path.join(str(template_directory), filename)
-            output_filename = os.path.join(str(output_directory), filename)
-            os.makedirs(os.path.dirname(output_filename), exist_ok=True)
+            input_filename = template_directory / filename
+            output_filename = output_directory / filename
+            output_filename.parent.mkdir(parents=True, exist_ok=True)

-            if os.path.exists(input_filename):
+            if input_filename.exists():
                 shutil.copy(input_filename, output_filename)
             else:
                 with open(output_filename, "w") as f:
                     f.write(contents[filename])

         for path in deleted:
-            abs_path = os.path.abspath(os.path.join(str(output_directory), path))
+            abs_path = (output_directory / path).resolve()

-            # be extra cautious that deleted path is within output_directory
-            if not abs_path.startswith(str(output_directory)):
+            if not is_relative_to(abs_path, output_directory):
                 raise Exception(
                     f"[ERROR] SHOULD NOT HAPPEN filename was about to be deleted but path={abs_path} is outside of output_directory"
                 )

-            if os.path.isfile(abs_path):
-                os.remove(abs_path)
-            elif os.path.isdir(abs_path):
+            if abs_path.is_file():
+                abs_path.unlink()
+            elif abs_path.is_dir():
                 shutil.rmtree(abs_path)
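hash_file itself is untouched by this patch and not shown in the diff; for context, the new/updated/deleted classification below rests on a checksum helper of roughly this shape — a sketch only, assuming sha256 to match the contents hashing in inspect_files:

```python
import hashlib


def hash_file(path) -> str:
    """Sketch of a file checksum helper; the real one lives in render.py."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:                        # Path or str both work
        for block in iter(lambda: f.read(8192), b""):
            digest.update(block)
    return digest.hexdigest()
```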
@@ -185,7 +181,7 @@ def gen_gitignore():
     """
     from inspect import cleandoc

-    filestoignore = """
+    files_to_ignore = """
     # ignore terraform state
     .terraform
     terraform.tfstate
@@ -195,7 +191,7 @@ def gen_gitignore():
     terraform.tfstate.backup
     .terraform.tfstate.lock.info

     # python
     __pycache__
     """
-    return {".gitignore": cleandoc(filestoignore)}
+    return {Path(".gitignore"): cleandoc(files_to_ignore)}


 def gen_cicd(config):
@@ -211,12 +207,12 @@ def gen_cicd(config):
     cicd_provider = config["ci_cd"]["type"]

     if cicd_provider == "github-actions":
-        gha_dir = ".github/workflows/"
-        cicd_files[gha_dir + "nebari-ops.yaml"] = gen_nebari_ops(config)
-        cicd_files[gha_dir + "nebari-linter.yaml"] = gen_nebari_linter(config)
+        gha_dir = Path(".github") / "workflows"
+        cicd_files[gha_dir / "nebari-ops.yaml"] = gen_nebari_ops(config)
+        cicd_files[gha_dir / "nebari-linter.yaml"] = gen_nebari_linter(config)

     elif cicd_provider == "gitlab-ci":
-        cicd_files[".gitlab-ci.yml"] = gen_gitlab_ci(config)
+        cicd_files[Path(".gitlab-ci.yml")] = gen_gitlab_ci(config)

     else:
         raise ValueError(
@@ -227,25 +223,25 @@

 def inspect_files(
-    source_dirs: str,
-    output_dirs: str,
-    source_base_dir: str,
-    output_base_dir: str,
+    source_dirs: Path,
+    output_dirs: Path,
+    source_base_dir: Path,
+    output_base_dir: Path,
     ignore_filenames: List[str] = None,
     ignore_directories: List[str] = None,
-    deleted_paths: List[str] = None,
+    deleted_paths: List[Path] = None,
     contents: Dict[str, str] = None,
 ):
     """Return created, updated and untracked files by computing a checksum over the provided directory.

     Args:
-        source_dirs (str): The source dir used as base for comparssion
-        output_dirs (str): The destination dir which will be matched with
-        source_base_dir (str): Relative base path to source directory
-        output_base_dir (str): Relative base path to output directory
+        source_dirs (Path): The source dir used as base for comparison
+        output_dirs (Path): The destination dir which will be matched with
+        source_base_dir (Path): Relative base path to source directory
+        output_base_dir (Path): Relative base path to output directory
         ignore_filenames (list[str]): Filenames to ignore while comparing for changes
         ignore_directories (list[str]): Directories to ignore while comparing for changes
-        deleted_paths (list[str]): Paths that if exist in output directory should be deleted
+        deleted_paths (list[Path]): Paths that if exist in output directory should be deleted
         contents (dict): filename to content mapping for dynamically generated files
     """
     ignore_filenames = ignore_filenames or []
@@ -256,35 +252,41 @@ def inspect_files(
     output_files = {}

     def list_files(
-        directory: str, ignore_filenames: List[str], ignore_directories: List[str]
+        directory: Path, ignore_filenames: List[str], ignore_directories: List[str]
     ):
-        for root, dirs, files in os.walk(directory):
-            dirs[:] = [d for d in dirs if d not in ignore_directories]
-            for file in files:
-                if file not in ignore_filenames:
-                    yield os.path.join(root, file)
-
-    for filename in contents:
-        source_files[filename] = hashlib.sha256(
-            contents[filename].encode("utf8")
-        ).hexdigest()
-        output_filename = os.path.join(output_base_dir, filename)
-        if os.path.isfile(output_filename):
+        for path in directory.rglob("*"):
+            if not path.is_file():
+                continue
+
+            if path.name in ignore_filenames:
+                continue
+
+            if any(
+                d in ignore_directories for d in path.relative_to(directory).parts[:-1]
+            ):
+                continue
+
+            yield path
+
+    for filename, content in contents.items():
+        source_files[filename] = hashlib.sha256(content.encode("utf8")).hexdigest()
+        output_filename = output_base_dir / filename
+        if output_filename.is_file():
             output_files[filename] = hash_file(filename)

     deleted_files = set()
     for path in deleted_paths:
-        absolute_path = os.path.join(output_base_dir, path)
-        if os.path.exists(absolute_path):
+        absolute_path = output_base_dir / path
+        if absolute_path.exists():
             deleted_files.add(path)

     for source_dir, output_dir in zip(source_dirs, output_dirs):
         for filename in list_files(source_dir, ignore_filenames, ignore_directories):
-            relative_path = os.path.relpath(filename, source_base_dir)
+            relative_path = filename.relative_to(source_base_dir)
             source_files[relative_path] = hash_file(filename)

         for filename in list_files(output_dir, ignore_filenames, ignore_directories):
-            relative_path = os.path.relpath(filename, output_base_dir)
+            relative_path = filename.relative_to(output_base_dir)
             output_files[relative_path] = hash_file(filename)

     new_files = source_files.keys() - output_files.keys()
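A small trade-off hiding in the list_files rewrite: os.walk with `dirs[:] = [...]` prunes ignored directories before descending into them, whereas rglob("*") visits everything and filters afterwards. The results are identical; on very large trees the old form did less I/O. The filter the new code applies is essentially:

```python
from pathlib import Path


def is_ignored(path: Path, root: Path, ignore_directories: set) -> bool:
    # parts[:-1] are the ancestor directory names between root and the file
    return bool(ignore_directories & set(path.relative_to(root).parts[:-1]))
```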
diff --git a/src/_nebari/stages/input_vars.py b/src/_nebari/stages/input_vars.py
index 85859cd2b..629ec2538 100644
--- a/src/_nebari/stages/input_vars.py
+++ b/src/_nebari/stages/input_vars.py
@@ -1,6 +1,6 @@
 import json
-import os
 import tempfile
+from pathlib import Path
 from urllib.parse import urlencode

 from _nebari.constants import (
@@ -44,8 +44,8 @@ def stage_01_terraform_state(stage_outputs, config):
 def stage_02_infrastructure(stage_outputs, config):
     if config["provider"] == "local":
         return {
-            "kubeconfig_filename": os.path.join(
-                tempfile.gettempdir(), "NEBARI_KUBECONFIG"
+            "kubeconfig_filename": str(
+                Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
             ),
             "kube_context": config["local"].get("kube_context"),
         }
@@ -58,8 +58,8 @@ def stage_02_infrastructure(stage_outputs, config):
             "region": config["digital_ocean"]["region"],
             "kubernetes_version": config["digital_ocean"]["kubernetes_version"],
             "node_groups": config["digital_ocean"]["node_groups"],
-            "kubeconfig_filename": os.path.join(
-                tempfile.gettempdir(), "NEBARI_KUBECONFIG"
+            "kubeconfig_filename": str(
+                Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
             ),
             **config.get("do", {}).get("terraform_overrides", {}),
         }
@@ -86,8 +86,8 @@ def stage_02_infrastructure(stage_outputs, config):
                 }
                 for key, value in config["google_cloud_platform"]["node_groups"].items()
             ],
-            "kubeconfig_filename": os.path.join(
-                tempfile.gettempdir(), "NEBARI_KUBECONFIG"
+            "kubeconfig_filename": str(
+                Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
             ),
             **config.get("gcp", {}).get("terraform_overrides", {}),
         }
@@ -98,8 +98,8 @@ def stage_02_infrastructure(stage_outputs, config):
             "region": config["azure"]["region"],
             "kubernetes_version": config["azure"]["kubernetes_version"],
             "node_groups": config["azure"]["node_groups"],
-            "kubeconfig_filename": os.path.join(
-                tempfile.gettempdir(), "NEBARI_KUBECONFIG"
+            "kubeconfig_filename": str(
+                Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
             ),
             "resource_group_name": f'{config["project_name"]}-{config["namespace"]}',
             "node_resource_group_name": f'{config["project_name"]}-{config["namespace"]}-node-resource-group',
@@ -123,8 +123,8 @@ def stage_02_infrastructure(stage_outputs, config):
                 }
                 for key, value in config["amazon_web_services"]["node_groups"].items()
             ],
-            "kubeconfig_filename": os.path.join(
-                tempfile.gettempdir(), "NEBARI_KUBECONFIG"
+            "kubeconfig_filename": str(
+                Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
             ),
             **config.get("amazon_web_services", {}).get("terraform_overrides", {}),
         }
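The `str(...)` wrappers above are deliberate rather than leftovers: these dicts are ultimately serialized (as JSON) into Terraform input variables, and json.dumps refuses Path objects, so the values are converted back to strings at the boundary. Minimal illustration:

```python
import json
import tempfile
from pathlib import Path

kubeconfig = Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG"
print(json.dumps({"kubeconfig_filename": str(kubeconfig)}))  # fine
# json.dumps({"kubeconfig_filename": kubeconfig})  # TypeError: not JSON serializable
```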
"_nebari.tf.json": tf_render_objects( [ NebariAWSProvider(config), ] @@ -162,7 +169,10 @@ def stage_01_terraform_state(config): def stage_02_infrastructure(config): if config["provider"] == "gcp": return { - "stages/02-infrastructure/gcp/_nebari.tf.json": tf_render_objects( + Path("stages") + / "02-infrastructure" + / "gcp" + / "_nebari.tf.json": tf_render_objects( [ NebariGCPProvider(config), NebariTerraformState("02-infrastructure", config), @@ -171,7 +181,10 @@ def stage_02_infrastructure(config): } elif config["provider"] == "do": return { - "stages/02-infrastructure/do/_nebari.tf.json": tf_render_objects( + Path("stages") + / "02-infrastructure" + / "do" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("02-infrastructure", config), ] @@ -179,7 +192,10 @@ def stage_02_infrastructure(config): } elif config["provider"] == "azure": return { - "stages/02-infrastructure/azure/_nebari.tf.json": tf_render_objects( + Path("stages") + / "02-infrastructure" + / "azure" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("02-infrastructure", config), ] @@ -187,7 +203,10 @@ def stage_02_infrastructure(config): } elif config["provider"] == "aws": return { - "stages/02-infrastructure/aws/_nebari.tf.json": tf_render_objects( + Path("stages") + / "02-infrastructure" + / "aws" + / "_nebari.tf.json": tf_render_objects( [ NebariAWSProvider(config), NebariTerraformState("02-infrastructure", config), @@ -200,7 +219,9 @@ def stage_02_infrastructure(config): def stage_03_kubernetes_initialize(config): return { - "stages/03-kubernetes-initialize/_nebari.tf.json": tf_render_objects( + Path("stages") + / "03-kubernetes-initialize" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("03-kubernetes-initialize", config), NebariKubernetesProvider(config), @@ -212,7 +233,9 @@ def stage_03_kubernetes_initialize(config): def stage_04_kubernetes_ingress(config): return { - "stages/04-kubernetes-ingress/_nebari.tf.json": tf_render_objects( + Path("stages") + / "04-kubernetes-ingress" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("04-kubernetes-ingress", config), NebariKubernetesProvider(config), @@ -224,7 +247,9 @@ def stage_04_kubernetes_ingress(config): def stage_05_kubernetes_keycloak(config): return { - "stages/05-kubernetes-keycloak/_nebari.tf.json": tf_render_objects( + Path("stages") + / "05-kubernetes-keycloak" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("05-kubernetes-keycloak", config), NebariKubernetesProvider(config), @@ -236,7 +261,9 @@ def stage_05_kubernetes_keycloak(config): def stage_06_kubernetes_keycloak_configuration(config): return { - "stages/06-kubernetes-keycloak-configuration/_nebari.tf.json": tf_render_objects( + Path("stages") + / "06-kubernetes-keycloak-configuration" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("06-kubernetes-keycloak-configuration", config), ] @@ -246,7 +273,9 @@ def stage_06_kubernetes_keycloak_configuration(config): def stage_07_kubernetes_services(config): return { - "stages/07-kubernetes-services/_nebari.tf.json": tf_render_objects( + Path("stages") + / "07-kubernetes-services" + / "_nebari.tf.json": tf_render_objects( [ NebariTerraformState("07-kubernetes-services", config), NebariKubernetesProvider(config), @@ -258,7 +287,9 @@ def stage_07_kubernetes_services(config): def stage_08_nebari_tf_extensions(config): return { - "stages/08-nebari-tf-extensions/_nebari.tf.json": tf_render_objects( + Path("stages") + / "08-nebari-tf-extensions" + / "_nebari.tf.json": 
diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py
index dda59ae95..6f9e1089d 100644
--- a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py
+++ b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py
@@ -1,7 +1,7 @@
 import json
 import logging
-import os
 import tempfile
+from pathlib import Path

 import requests
 from conda_store_server import api, orm, schema
@@ -128,7 +128,7 @@ async def authenticate(self, request):

         for group in user_data.get("groups", []):
             # Use only the base name of Keycloak groups
-            group_name = os.path.basename(group)
+            group_name = Path(group).name
             namespaces.add(group_name)
             role_bindings[f"{group_name}/*"] = roles

@@ -169,7 +169,7 @@ async def authenticate(self, request):

 # run arbitrary python code
 # compiling makes debugging easier: https://stackoverflow.com/a/437857
-extra_config_filename = os.path.join(tempfile.gettempdir(), "extra-config.py")
+extra_config_filename = Path(tempfile.gettempdir()) / "extra-config.py"
 extra_config = config.get("extra-config", "")
 with open(extra_config_filename, "w") as f:
     f.write(extra_config)
diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py
index 508c49774..ae2532d55 100644
--- a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py
+++ b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py
@@ -1,6 +1,6 @@
 import functools
 import json
-import os
+from pathlib import Path

 import urllib3
 from aiohttp import web
@@ -77,7 +77,7 @@ async def authenticate(self, request):
         )
         user.admin = "dask_gateway_admin" in data["roles"]
-        user.groups = [os.path.basename(group) for group in data["groups"]]
+        user.groups = [Path(group).name for group in data["groups"]]
         return user

@@ -143,7 +143,7 @@ def base_node_group(options):

 def base_conda_store_mounts(namespace, name):
     conda_store_pvc_name = config["conda-store-pvc"]
-    conda_store_mount = config["conda-store-mount"]
+    conda_store_mount = Path(config["conda-store-mount"])

     return {
         "scheduler_extra_pod_config": {
@@ -159,7 +159,7 @@ def base_conda_store_mounts(namespace, name):
         "scheduler_extra_container_config": {
             "volumeMounts": [
                 {
-                    "mountPath": os.path.join(conda_store_mount, namespace),
+                    "mountPath": str(conda_store_mount / namespace),
                     "name": "conda-store",
                     "subPath": namespace,
                 }
@@ -178,7 +178,7 @@ def base_conda_store_mounts(namespace, name):
         "worker_extra_container_config": {
             "volumeMounts": [
                 {
-                    "mountPath": os.path.join(conda_store_mount, namespace),
+                    "mountPath": str(conda_store_mount / namespace),
                     "name": "conda-store",
                     "subPath": namespace,
                 }
@@ -187,9 +187,7 @@ def base_conda_store_mounts(namespace, name):
         "worker_cmd": "/opt/conda-run-worker",
         "scheduler_cmd": "/opt/conda-run-scheduler",
         "environment": {
-            "CONDA_ENVIRONMENT": os.path.join(
-                conda_store_mount, namespace, "envs", name
-            ),
+            "CONDA_ENVIRONMENT": str(conda_store_mount / namespace / "envs" / name),
         },
     }
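`Path(group).name` is a faithful stand-in for os.path.basename() on Keycloak-style group paths such as /projects/myproj, with one edge case that actually moves in pathlib's favour — trailing slashes (illustrative, not from the patch):

```python
import os
from pathlib import Path

print(os.path.basename("/projects/myproj"))   # 'myproj'
print(Path("/projects/myproj").name)          # 'myproj'

print(os.path.basename("/projects/myproj/"))  # '' — empty after a trailing slash
print(Path("/projects/myproj/").name)         # 'myproj' — pathlib normalizes it away
```

The `str(...)` around the mountPath and CONDA_ENVIRONMENT joins matters for the same reason as in input_vars.py: these values end up in serialized Kubernetes pod specs, which expect plain strings.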
diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py
index 255d5e1fe..b12b352c1 100644
--- a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py
+++ b/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py
@@ -1,7 +1,7 @@
 import copy
 import functools
 import json
-import os
+from pathlib import Path

 import z2jh
 from tornado import gen
@@ -146,7 +146,7 @@ def profile_conda_store_mounts(username, groups):
     """
     conda_store_pvc_name = z2jh.get_config("custom.conda-store-pvc")
-    conda_store_mount = z2jh.get_config("custom.conda-store-mount")
+    conda_store_mount = Path(z2jh.get_config("custom.conda-store-mount"))
     default_namespace = z2jh.get_config("custom.default-conda-store-namespace")

     extra_pod_config = {
@@ -164,7 +164,7 @@ def profile_conda_store_mounts(username, groups):
     extra_container_config = {
         "volumeMounts": [
             {
-                "mountPath": os.path.join(conda_store_mount, namespace),
+                "mountPath": str(conda_store_mount / namespace),
                 "name": "conda-store",
                 "subPath": namespace,
             }
@@ -386,7 +386,7 @@ def render_profiles(spawner):
     # only return the lowest level group name
     # e.g. /projects/myproj -> myproj
     # and /developers -> developers
-    groups = [os.path.basename(_) for _ in auth_state["oauth_user"]["groups"]]
+    groups = [Path(group).name for group in auth_state["oauth_user"]["groups"]]
     spawner.log.error(f"user info: {username} {groups}")

     keycloak_profilenames = auth_state["oauth_user"].get("jupyterlab_profiles", [])
diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py
index e10df357a..b24e70705 100644
--- a/src/_nebari/upgrade.py
+++ b/src/_nebari/upgrade.py
@@ -1,10 +1,10 @@
 import json
 import logging
-import pathlib
 import re
 import secrets
 import string
 from abc import ABC
+from pathlib import Path

 import rich
 from pydantic.error_wrappers import ValidationError
@@ -235,7 +235,7 @@ class Upgrade_0_4_0(UpgradeStep):
     version = "0.4.0"

     def _version_specific_upgrade(
-        self, config, start_version, config_filename: pathlib.Path, *args, **kwargs
+        self, config, start_version, config_filename: Path, *args, **kwargs
     ):
         """
         Upgrade to Keycloak.
@@ -364,7 +364,7 @@ class Upgrade_0_4_1(UpgradeStep):
     version = "0.4.1"

     def _version_specific_upgrade(
-        self, config, start_version, config_filename: pathlib.Path, *args, **kwargs
+        self, config, start_version, config_filename: Path, *args, **kwargs
     ):
         """
         Upgrade jupyterlab profiles.
@@ -390,7 +390,7 @@ class Upgrade_2023_4_2(UpgradeStep):
     version = "2023.4.2"

     def _version_specific_upgrade(
-        self, config, start_version, config_filename: pathlib.Path, *args, **kwargs
+        self, config, start_version, config_filename: Path, *args, **kwargs
     ):
         """
         Prompt users to delete Argo CRDs
diff --git a/src/_nebari/utils.py b/src/_nebari/utils.py
index dcc1aa1a5..6e78dd0c0 100644
--- a/src/_nebari/utils.py
+++ b/src/_nebari/utils.py
@@ -1,13 +1,13 @@
 import contextlib
 import functools
 import os
-import pathlib
 import re
 import signal
 import subprocess
 import sys
 import threading
 import time
+from pathlib import Path
 from typing import Dict, List

 from ruamel.yaml import YAML
@@ -51,7 +51,7 @@ def timer(logger, prefix):

 @contextlib.contextmanager
 def change_directory(directory):
-    current_directory = os.getcwd()
+    current_directory = Path.cwd()
     os.chdir(directory)
     yield
     os.chdir(current_directory)
@@ -175,27 +175,27 @@ def check_cloud_credentials(config):
         raise ValueError("Cloud Provider configuration not supported")


-def load_yaml(config_filename: pathlib.Path):
+def load_yaml(config_filename: Path):
     """
     Return yaml dict containing config loaded from config_filename.
     """
-    with config_filename.open() as f:
+    with open(config_filename) as f:
         config = yaml.load(f.read())

     return config


-def backup_config_file(filename: pathlib.Path, extrasuffix: str = ""):
+def backup_config_file(filename: Path, extrasuffix: str = ""):
     if not filename.exists():
         return

     # Backup old file
-    backup_filename = pathlib.Path(f"{filename}{extrasuffix}.backup")
+    backup_filename = Path(f"{filename}{extrasuffix}.backup")

     if backup_filename.exists():
         i = 1
         while True:
-            next_backup_filename = pathlib.Path(f"{backup_filename}~{i}")
+            next_backup_filename = Path(f"{backup_filename}~{i}")
             if not next_backup_filename.exists():
                 backup_filename = next_backup_filename
                 break
@@ -393,3 +393,15 @@ def set_nebari_dask_version() -> str:
         return NEBARI_DASK_VERSION

     return DEFAULT_NEBARI_DASK_VERSION
+
+
+def is_relative_to(self: Path, other: Path, /) -> bool:
+    """Compatibility function to bring ``Path.is_relative_to`` to Python 3.8"""
+    if sys.version_info[:2] >= (3, 9):
+        return self.is_relative_to(other)
+
+    try:
+        self.relative_to(other)
+        return True
+    except ValueError:
+        return False
diff --git a/tests/scripts/minikube-loadbalancer-ip.py b/tests/scripts/minikube-loadbalancer-ip.py
index 33a8972f2..54c280729 100755
--- a/tests/scripts/minikube-loadbalancer-ip.py
+++ b/tests/scripts/minikube-loadbalancer-ip.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 import json
-import os
 import subprocess
 import sys
+from pathlib import Path

 minikube_cmd = ["minikube", "ssh", "--", "ip", "-j", "a"]
 minikube_output = subprocess.check_output(minikube_cmd, encoding="utf-8")[:-1]
@@ -16,7 +16,7 @@
     print("minikube interface eth0 not found")
     sys.exit(1)

-filename = os.path.expanduser("~/.minikube/profiles/minikube/config.json")
+filename = Path.home() / ".minikube" / "profiles" / "minikube" / "config.json"
 with open(filename) as f:
     data = json.load(f)
diff --git a/tests/test_render.py b/tests/test_render.py
index 38bbea1eb..7667a3163 100644
--- a/tests/test_render.py
+++ b/tests/test_render.py
@@ -35,7 +35,7 @@ def write_nebari_config_to_file(setup_fixture):
     yaml = YAML(typ="unsafe", pure=True)
     yaml.dump(config, nebari_config_loc)

-    render_template(str(nebari_config_loc.parent), nebari_config_loc)
+    render_template(nebari_config_loc.parent, nebari_config_loc)

     yield setup_fixture
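For reference, the new utils helper mirrors Path.is_relative_to from Python 3.9+ and is what render.py's deletion guard (earlier in this diff) relies on. Usage, assuming the import shown:

```python
from pathlib import Path

from _nebari.utils import is_relative_to

out = Path("/projects/deploy").resolve()
print(is_relative_to(out / "stages" / "main.tf", out))  # True — safe to delete
print(is_relative_to(Path("/etc/passwd"), out))         # False — the guard refuses
```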