diff --git a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml new file mode 100644 index 000000000..5607649ea --- /dev/null +++ b/.github/workflows/test_integration.yaml @@ -0,0 +1,52 @@ +name: "Deploy on Digital Ocean" + +on: + schedule: + - cron: "0 0 * * MON" + workflow_dispatch: + +jobs: + test-integration: + name: "Pytest Integration" + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: "Checkout Infrastructure" + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + + - name: Retrieve secret from Vault + uses: hashicorp/vault-action@v2.5.0 + with: + method: jwt + url: "https://quansight-vault-public-vault-b2379fa7.d415e30e.z1.hashicorp.cloud:8200" + namespace: "admin/quansight" + role: "repository-nebari-dev-nebari-role" + secrets: | + kv/data/repository/nebari-dev/nebari/amazon_web_services/nebari-dev-ci role_name | AWS_ROLE_ARN; + kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci project_id | PROJECT_ID; + kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci workload_identity_provider | GCP_WORKFLOW_PROVIDER; + kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci service_account_name | GCP_SERVICE_ACCOUNT; + kv/data/repository/nebari-dev/nebari/shared_secrets DIGITALOCEAN_TOKEN | DIGITALOCEAN_TOKEN; + kv/data/repository/nebari-dev/nebari/cloudflare/internal-devops@quansight.com/nebari-dev-ci token | CLOUDFLARE_TOKEN; + + - name: Install Nebari + run: | + pip install .[dev] + conda install --quiet --yes conda-build + + - name: Integration Tests + run: | + pytest --version + pytest tests_integration/ -vvv -s + env: + NEBARI_K8S_VERSION: 1.25.12-do.0 + SPACES_ACCESS_KEY_ID: ${{ secrets.SPACES_ACCESS_KEY_ID }} + SPACES_SECRET_ACCESS_KEY: ${{ secrets.SPACES_SECRET_ACCESS_KEY }} diff --git a/.gitignore b/.gitignore index d7c6ef5a6..f3128025d 100644 --- a/.gitignore +++ b/.gitignore @@ -52,3 +52,6 @@ nebari-config.yaml .ipynb_checkpoints .DS_Store /.ruff_cache + +# Integration tests deployments +_test_deploy diff --git a/src/_nebari/constants.py b/src/_nebari/constants.py index cc8f8e776..2f79a6273 100644 --- a/src/_nebari/constants.py +++ b/src/_nebari/constants.py @@ -5,7 +5,7 @@ # 04-kubernetes-ingress DEFAULT_TRAEFIK_IMAGE_TAG = "2.9.1" -HIGHEST_SUPPORTED_K8S_VERSION = "1.24.13" +HIGHEST_SUPPORTED_K8S_VERSION = "1.25.12" DEFAULT_GKE_RELEASE_CHANNEL = "UNSPECIFIED" DEFAULT_NEBARI_DASK_VERSION = CURRENT_RELEASE diff --git a/src/_nebari/deploy.py b/src/_nebari/deploy.py index 8dbd62013..0dcd951af 100644 --- a/src/_nebari/deploy.py +++ b/src/_nebari/deploy.py @@ -256,6 +256,7 @@ def guided_install( print( "Additional administration docs can be found at https://docs.nebari.dev/en/stable/source/admin_guide/" ) + return stage_outputs def deploy_configuration( @@ -293,7 +294,7 @@ def deploy_configuration( with timer(logger, "deploying Nebari"): try: - guided_install( + return guided_install( config, dns_provider, dns_auto_provision, @@ -302,5 +303,6 @@ def deploy_configuration( skip_remote_state_provision, ) except subprocess.CalledProcessError as e: + logger.error("subprocess command failed") logger.error(e.output) raise e diff --git a/src/_nebari/provider/cloud/digital_ocean.py b/src/_nebari/provider/cloud/digital_ocean.py index 94b4d3dd0..fd55672f1 100644 --- 
a/src/_nebari/provider/cloud/digital_ocean.py +++ b/src/_nebari/provider/cloud/digital_ocean.py @@ -44,6 +44,9 @@ def regions(): def kubernetes_versions(region): """Return list of available kubernetes supported by cloud provider. Sorted from oldest to latest.""" supported_kubernetes_versions = sorted( - [_["slug"] for _ in _kubernetes_options()["options"]["versions"]] + [_["slug"].split("-")[0] for _ in _kubernetes_options()["options"]["versions"]] ) - return filter_by_highest_supported_k8s_version(supported_kubernetes_versions) + filtered_versions = filter_by_highest_supported_k8s_version( + supported_kubernetes_versions + ) + return [f"{v}-do.0" for v in filtered_versions] diff --git a/src/_nebari/stages/checks.py b/src/_nebari/stages/checks.py index 795bce550..86259846a 100644 --- a/src/_nebari/stages/checks.py +++ b/src/_nebari/stages/checks.py @@ -198,7 +198,8 @@ def _attempt_keycloak_connection( ) print(f"Attempt {i+1} succeeded connecting to keycloak master realm") return True - except KeycloakError: + except KeycloakError as e: + print(e) print(f"Attempt {i+1} failed connecting to keycloak master realm") time.sleep(timeout) return False diff --git a/src/_nebari/utils.py b/src/_nebari/utils.py index 6e78dd0c0..4ca07f82b 100644 --- a/src/_nebari/utils.py +++ b/src/_nebari/utils.py @@ -106,6 +106,7 @@ def kill_process(): if timeout_timer is not None: timeout_timer.cancel() + process.stdout.close() return process.wait( timeout=10 ) # Should already have finished because we have drained stdout diff --git a/tests/conftest.py b/tests/conftest.py index cbcef549f..65abddf33 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,33 +1,8 @@ -from functools import partial from unittest.mock import Mock import pytest -from _nebari.initialize import render_config - -INIT_INPUTS = [ - # project, namespace, domain, cloud_provider, ci_provider, auth_provider - ("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"), - ("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"), - ("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"), - ("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"), -] - -NEBARI_CONFIG_FN = "nebari-config.yaml" -PRESERVED_DIR = "preserved_dir" -DEFAULT_GH_REPO = "github.com/test/test" -DEFAULT_TERRAFORM_STATE = "remote" - - -# use this partial function for all tests that need to call `render_config` -render_config_partial = partial( - render_config, - repository=DEFAULT_GH_REPO, - repository_auto_provision=False, - auth_auto_provision=False, - terraform_state=DEFAULT_TERRAFORM_STATE, - disable_prompt=True, -) +from tests.utils import INIT_INPUTS, NEBARI_CONFIG_FN, PRESERVED_DIR @pytest.fixture(params=INIT_INPUTS) diff --git a/tests/test_init.py b/tests/test_init.py index 69a007fea..a64d511fc 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -1,6 +1,6 @@ import pytest -from .conftest import render_config_partial +from .utils import render_config_partial @pytest.mark.parametrize( diff --git a/tests/test_render.py b/tests/test_render.py index 7667a3163..2ec7f407a 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -6,7 +6,7 @@ from _nebari.render import render_template, set_env_vars_in_config -from .conftest import PRESERVED_DIR, render_config_partial +from .utils import PRESERVED_DIR, render_config_partial @pytest.fixture diff --git a/tests/test_schema.py b/tests/test_schema.py index ea4d0d329..d4d8cf878 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,6 
+1,6 @@ import _nebari.schema -from .conftest import render_config_partial +from .utils import render_config_partial def test_schema(setup_fixture): diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 000000000..82dffdcd3 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,25 @@ +from functools import partial + +from _nebari.initialize import render_config + +DEFAULT_TERRAFORM_STATE = "remote" + +DEFAULT_GH_REPO = "github.com/test/test" +render_config_partial = partial( + render_config, + repository=DEFAULT_GH_REPO, + repository_auto_provision=False, + auth_auto_provision=False, + terraform_state=DEFAULT_TERRAFORM_STATE, + disable_prompt=True, +) +INIT_INPUTS = [ + # project, namespace, domain, cloud_provider, ci_provider, auth_provider + ("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"), + ("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"), + ("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"), + ("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"), +] + +NEBARI_CONFIG_FN = "nebari-config.yaml" +PRESERVED_DIR = "preserved_dir" diff --git a/tests_e2e/playwright/navigator.py b/tests_e2e/playwright/navigator.py index d60ffdde6..5bf0d3efe 100644 --- a/tests_e2e/playwright/navigator.py +++ b/tests_e2e/playwright/navigator.py @@ -1,12 +1,10 @@ import contextlib import datetime as dt import logging -import os import re import time import urllib -import dotenv from playwright.sync_api import expect, sync_playwright logger = logging.getLogger() @@ -410,20 +408,3 @@ def write_file(self, filepath, content): self.run_terminal_command(f"ls {filepath}") logger.debug(f"time to complete {dt.datetime.now() - start}") time.sleep(2) - - -if __name__ == "__main__": - dotenv.load_dotenv() - nav = Navigator( - nebari_url="https://nebari.quansight.dev/", - username=os.environ["KEYCLOAK_USERNAME"], - password=os.environ["KEYCLOAK_PASSWORD"], - auth="password", - instance_name="small-instance", - headless=False, - slow_mo=100, - ) - nav.login() - nav.start_server() - nav.reset_workspace() - nav.teardown() diff --git a/tests_e2e/playwright/run_notebook.py b/tests_e2e/playwright/run_notebook.py index 351f266de..12d2e2038 100644 --- a/tests_e2e/playwright/run_notebook.py +++ b/tests_e2e/playwright/run_notebook.py @@ -1,22 +1,18 @@ import contextlib import logging -import os from pathlib import Path -import dotenv from navigator import Navigator logger = logging.getLogger() -class RunNotebook: +class Notebook: def __init__(self, navigator: Navigator): self.nav = navigator self.nav.initialize - def run_notebook( - self, path, expected_output_text, conda_env, runtime=30000, retry=2 - ): + def run(self, path, expected_output_text, conda_env, runtime=30000, retry=2): """Run jupyter notebook and check for expected output text anywhere on the page. 
@@ -84,32 +80,3 @@ def _restart_run_all(self):
         )
         if restart_dialog_button.is_visible():
             restart_dialog_button.click()
-
-
-if __name__ == "__main__":
-    dotenv.load_dotenv()
-    nav = Navigator(
-        nebari_url="https://nebari.quansight.dev/",
-        username=os.environ["KEYCLOAK_USERNAME"],
-        password=os.environ["KEYCLOAK_PASSWORD"],
-        auth="password",
-        instance_name="small-instance",
-        headless=False,
-        slow_mo=100,
-    )
-    nav.login()
-    nav.start_server()
-    nav.reset_workspace()
-    test_app = RunNotebook(navigator=nav)
-    notebook_filepath_in_repo = "test_data/test_notebook_output.ipynb"
-    notebook_filepath_on_nebari = "test_notebook_output.ipynb"
-    with open(notebook_filepath_in_repo, "r") as notebook:
-        test_app.nav.write_file(
-            filepath=notebook_filepath_on_nebari, content=notebook.read()
-        )
-    test_app.run_notebook(
-        path="nebari/tests_e2e/playwright/test_data/test_notebook_output.ipynb",
-        expected_output_text="success: 6",
-        conda_env="conda-env-default-py",
-    )
-    nav.teardown()
diff --git a/tests_e2e/playwright/test_playwright.py b/tests_e2e/playwright/test_playwright.py
index ff636904b..264243fd9 100644
--- a/tests_e2e/playwright/test_playwright.py
+++ b/tests_e2e/playwright/test_playwright.py
@@ -1,12 +1,12 @@
-from run_notebook import RunNotebook
+from run_notebook import Notebook


 def test_notebook(navigator, test_data_root):
-    test_app = RunNotebook(navigator=navigator)
+    test_app = Notebook(navigator=navigator)
     notebook_name = "test_notebook_output.ipynb"
     with open(test_data_root / notebook_name, "r") as notebook:
         test_app.nav.write_file(filepath=notebook_name, content=notebook.read())
-    test_app.run_notebook(
+    test_app.run(
         path=notebook_name,
         expected_output_text="success: 6",
         conda_env="conda-env-default-py",
diff --git a/tests_integration/README.md b/tests_integration/README.md
new file mode 100644
index 000000000..6735ae4ea
--- /dev/null
+++ b/tests_integration/README.md
@@ -0,0 +1,26 @@
+# Integration Testing via Pytest
+
+These tests deploy Nebari to the cloud and run checks against the
+live deployment. At the moment they only deploy on DigitalOcean.
+
+You need the following environment variables to run them:
+
+```bash
+DIGITALOCEAN_TOKEN
+NEBARI_K8S_VERSION
+SPACES_ACCESS_KEY_ID
+SPACES_SECRET_ACCESS_KEY
+CLOUDFLARE_TOKEN
+```
+
+For instructions on how to obtain these variables, check the
+documentation for DigitalOcean deployment.
+
+Running the tests:
+
+```bash
+pytest tests_integration -vvv -s
+```
+
+This will deploy Nebari on DigitalOcean, run tests against the
+deployment, and then tear down the cluster.
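A minimal sketch of running the new integration suite locally, assuming DigitalOcean, Spaces, and Cloudflare credentials are already at hand; every value below is a placeholder, not a real secret:

```bash
# Placeholder credentials -- substitute real values obtained as described
# in the DigitalOcean deployment documentation.
export DIGITALOCEAN_TOKEN="..."
export SPACES_ACCESS_KEY_ID="..."
export SPACES_SECRET_ACCESS_KEY="..."
export CLOUDFLARE_TOKEN="..."
# Kubernetes version pinned by the CI workflow above.
export NEBARI_K8S_VERSION="1.25.12-do.0"

# Deploys Nebari, runs the tests against it, and prints verbose output.
pytest tests_integration -vvv -s
```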
diff --git a/tests_integration/__init__.py b/tests_integration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py new file mode 100644 index 000000000..6a64a20ab --- /dev/null +++ b/tests_integration/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ["tests_integration.deployment_fixtures"] diff --git a/tests_integration/deployment_fixtures.py b/tests_integration/deployment_fixtures.py new file mode 100644 index 000000000..bbf7190a5 --- /dev/null +++ b/tests_integration/deployment_fixtures.py @@ -0,0 +1,95 @@ +import logging +import os +import random +import string +import warnings +from pathlib import Path + +import pytest +import yaml +from urllib3.exceptions import InsecureRequestWarning + +from _nebari.deploy import deploy_configuration +from _nebari.destroy import destroy_configuration +from _nebari.render import render_template +from tests.utils import render_config_partial + +DEPLOYMENT_DIR = "_test_deploy" + +logger = logging.getLogger(__name__) + + +def ignore_warnings(): + # Ignore this for now, as test is failing due to a + # DeprecationWarning and InsecureRequestWarning + warnings.filterwarnings("ignore", category=DeprecationWarning) + warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +def _random_letters(length=5): + letters = string.ascii_letters + return "".join(random.choice(letters) for _ in range(length)).lower() + + +def _get_or_create_deployment_directory(cloud): + """This will create a directory to initialise and deploy + Nebari from. + """ + deployment_dirs = list(Path(Path(DEPLOYMENT_DIR) / cloud).glob(f"pytest{cloud}*")) + if deployment_dirs: + deployment_dir = deployment_dirs[0] + else: + project_name = f"pytest{cloud}{_random_letters()}" + deployment_dir = Path(Path(Path(DEPLOYMENT_DIR) / cloud) / project_name) + deployment_dir.mkdir(parents=True) + return deployment_dir + + +def _set_do_environment(): + os.environ["AWS_ACCESS_KEY_ID"] = os.environ["SPACES_ACCESS_KEY_ID"] + os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ["SPACES_SECRET_ACCESS_KEY"] + + +@pytest.fixture(scope="session") +def deploy(request): + """Deploy Nebari on the given cloud, currently only DigitalOcean""" + ignore_warnings() + cloud = request.param + _set_do_environment() + deployment_dir = _get_or_create_deployment_directory(cloud) + config = render_config_partial( + project_name=deployment_dir.name, + namespace="dev", + nebari_domain=f"ci-{cloud}.nebari.dev", + cloud_provider=cloud, + ci_provider="github-actions", + auth_provider="github", + ) + deployment_dir_abs = deployment_dir.absolute() + os.chdir(deployment_dir) + logger.info(f"Temporary directory: {deployment_dir}") + with open(Path("nebari-config.yaml"), "w") as f: + yaml.dump(config, f) + render_template(deployment_dir_abs, Path("nebari-config.yaml")) + try: + yield deploy_configuration( + config=config, + dns_provider="cloudflare", + dns_auto_provision=True, + disable_prompt=True, + disable_checks=False, + skip_remote_state_provision=False, + ) + except Exception as e: + logger.info(f"Deploy Failed, Exception: {e}") + logger.exception(e) + logger.info("Tearing down") + return _destroy(config) + + +def _destroy(config): + destroy_configuration(config) + + +def on_cloud(param): + return pytest.mark.parametrize("deploy", [param], indirect=True) diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py new file mode 100644 index 000000000..7ea18dfc2 --- /dev/null +++ 
b/tests_integration/test_integration.py @@ -0,0 +1,62 @@ +import pytest +import requests + +from tests_integration.deployment_fixtures import ignore_warnings, on_cloud + + +@pytest.fixture(autouse=True) +def disable_warnings(): + ignore_warnings() + + +@on_cloud("do") +def test_do_service_status(deploy): + """Tests if deployment on DigitalOcean succeeds""" + service_urls = deploy["stages/07-kubernetes-services"]["service_urls"]["value"] + assert ( + requests.get(service_urls["jupyterhub"]["health_url"], verify=False).status_code + == 200 + ) + assert ( + requests.get(service_urls["keycloak"]["health_url"], verify=False).status_code + == 200 + ) + assert ( + requests.get( + service_urls["dask_gateway"]["health_url"], verify=False + ).status_code + == 200 + ) + assert ( + requests.get( + service_urls["conda_store"]["health_url"], verify=False + ).status_code + == 200 + ) + assert ( + requests.get(service_urls["monitoring"]["health_url"], verify=False).status_code + == 200 + ) + + +@on_cloud("do") +def test_verify_keycloak_users(deploy): + """Tests if keycloak is working and it has expected users""" + keycloak_credentials = deploy["stages/05-kubernetes-keycloak"][ + "keycloak_credentials" + ]["value"] + from keycloak import KeycloakAdmin + + keycloak_admin = KeycloakAdmin( + server_url=f"{keycloak_credentials['url']}/auth/", + username=keycloak_credentials["username"], + password=keycloak_credentials["password"], + realm_name=keycloak_credentials["realm"], + client_id=keycloak_credentials["client_id"], + verify=False, + ) + assert set([u["username"] for u in keycloak_admin.get_users()]) == { + "nebari-bot", + "read-only-user", + "root", + }
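For illustration only, a single test from this module can be selected with pytest's `-k` expression while still going through the session-scoped `deploy` fixture; this sketch assumes the same environment variables listed in the README above:

```bash
# Run only the Keycloak user check; the `deploy` fixture still renders and
# deploys (or reuses) the DigitalOcean cluster before the test executes.
pytest tests_integration/test_integration.py -k test_verify_keycloak_users -vvv -s
```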