diff --git a/.github/workflows/build_documentation.yaml b/.github/workflows/build_documentation.yaml index 8243cfdc97..5350075540 100644 --- a/.github/workflows/build_documentation.yaml +++ b/.github/workflows/build_documentation.yaml @@ -46,7 +46,7 @@ jobs: run: hatch run docs:build - name: Link Checker - uses: lycheeverse/lychee-action@v1.10.0 + uses: lycheeverse/lychee-action@v2.0.2 with: args: --config='./.lychee.toml' --no-progress './docs/build/html/**/*.html' fail: true # fail on broken links diff --git a/.github/workflows/test_code.yaml b/.github/workflows/test_code.yaml index 52bce85ef6..0119816136 100644 --- a/.github/workflows/test_code.yaml +++ b/.github/workflows/test_code.yaml @@ -55,7 +55,7 @@ jobs: shell: bash run: npm install -g markdown-link-check - name: Link Checker - uses: lycheeverse/lychee-action@v1.10.0 + uses: lycheeverse/lychee-action@v2.0.2 with: args: --config='./.lychee.toml' --no-progress --offline '**/*.md' --exclude-path './docs' fail: true # fail on broken links diff --git a/.hatch/requirements-docs.txt b/.hatch/requirements-docs.txt index 9f3cdcf9fa..3f9d070931 100644 --- a/.hatch/requirements-docs.txt +++ b/.hatch/requirements-docs.txt @@ -5,7 +5,7 @@ # - myst-parser==4.0.0 # - pydata-sphinx-theme==0.15.4 # - sphinx-togglebutton==0.3.2 -# - sphinx==8.0.2 +# - sphinx==8.1.3 # accessible-pygments==0.0.5 @@ -20,7 +20,7 @@ beautifulsoup4==4.12.3 # via pydata-sphinx-theme certifi==2024.8.30 # via requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests docutils==0.21.2 # via @@ -42,7 +42,7 @@ markdown-it-py==3.0.0 # via # mdit-py-plugins # myst-parser -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 mdit-py-plugins==0.4.2 # via myst-parser @@ -69,7 +69,7 @@ snowballstemmer==2.2.0 # via sphinx soupsieve==2.6 # via beautifulsoup4 -sphinx==8.0.2 +sphinx==8.1.3 # via # hatch.envs.docs # myst-parser diff --git a/.hatch/requirements-lint.txt b/.hatch/requirements-lint.txt index 0a0e48f6c4..1e636503c1 100644 --- a/.hatch/requirements-lint.txt +++ b/.hatch/requirements-lint.txt @@ -2,10 +2,10 @@ # This file is autogenerated by hatch-pip-compile with Python 3.12 # # - ansible-dev-tools==24.9.0 -# - ansible==10.4.0 -# - black==24.8.0 +# - ansible==10.5.0 +# - black==24.10.0 # - mypy==1.11.2 -# - pandas-stubs==2.2.2.240909 +# - pandas-stubs==2.2.3.241009 # - pydantic==2.9.2 # - ruff==0.6.9 # - types-appdirs==1.4.3.5 @@ -17,7 +17,7 @@ annotated-types==0.7.0 # via pydantic -ansible==10.4.0 +ansible==10.5.0 # via hatch.envs.lint ansible-builder==3.1.0 # via @@ -29,14 +29,14 @@ ansible-compat==24.9.1 # ansible-lint # molecule # pytest-ansible -ansible-core==2.17.4 +ansible-core==2.17.5 # via # ansible # ansible-compat # ansible-lint # molecule # pytest-ansible -ansible-creator==24.9.0 +ansible-creator==24.10.1 # via ansible-dev-tools ansible-dev-environment==24.9.0 # via ansible-dev-tools @@ -58,7 +58,7 @@ attrs==24.2.0 # referencing bindep==2.11.0 # via ansible-builder -black==24.8.0 +black==24.10.0 # via # hatch.envs.lint # ansible-lint @@ -81,9 +81,9 @@ click-help-colors==0.9.4 # via molecule colorama==0.4.6 # via tox -cryptography==43.0.1 +cryptography==43.0.3 # via ansible-core -distlib==0.3.8 +distlib==0.3.9 # via # ansible-sign # virtualenv @@ -117,13 +117,13 @@ jsonschema==4.23.0 # ansible-lint # ansible-navigator # molecule -jsonschema-specifications==2023.12.1 +jsonschema-specifications==2024.10.1 # via jsonschema lockfile==0.12.2 # via python-daemon markdown-it-py==3.0.0 # via rich -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 
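These `.hatch/requirements-*.txt` lockfiles are autogenerated by hatch-pip-compile, so every dependency update shows up as an exact `name==version` pin change. As a rough illustration (not part of this diff), pins like these can be parsed and compared to summarise what an update PR changes; the paths and the `report_bumps` helper below are hypothetical:

```python
# Sketch: summarise version bumps between two pip-compile style lockfiles.
# Paths and helper names are hypothetical; pins are parsed as "name==version".
import re
from pathlib import Path

PIN = re.compile(r"^([A-Za-z0-9._-]+)==(\S+)$")


def read_pins(path: Path) -> dict[str, str]:
    pins: dict[str, str] = {}
    for raw in path.read_text().splitlines():
        line = raw.split("#", 1)[0].strip()  # drop trailing comments
        if match := PIN.match(line):
            pins[match.group(1).lower()] = match.group(2)
    return pins


def report_bumps(old: Path, new: Path) -> None:
    before, after = read_pins(old), read_pins(new)
    for name in sorted(before.keys() & after.keys()):
        if before[name] != after[name]:
            print(f"{name}: {before[name]} -> {after[name]}")


# report_bumps(Path("old/requirements-docs.txt"), Path(".hatch/requirements-docs.txt"))
```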
mdurl==0.1.2 # via markdown-it-py @@ -153,7 +153,7 @@ packaging==24.1 # pytest # pytest-ansible # tox -pandas-stubs==2.2.2.240909 +pandas-stubs==2.2.3.241009 # via hatch.envs.lint parsley==1.3 # via bindep @@ -233,7 +233,7 @@ rpds-py==0.20.0 # referencing ruamel-yaml==0.18.6 # via ansible-lint -ruamel-yaml-clib==0.2.8 +ruamel-yaml-clib==0.2.12 # via ruamel-yaml ruff==0.6.9 # via hatch.envs.lint @@ -242,7 +242,7 @@ subprocess-tee==0.4.2 # ansible-compat # ansible-dev-environment # ansible-lint -tox==4.21.2 +tox==4.23.0 # via tox-ansible tox-ansible==24.9.0 # via ansible-dev-tools @@ -267,7 +267,7 @@ tzdata==2024.2 # via ansible-navigator urllib3==2.2.3 # via types-requests -virtualenv==20.26.6 +virtualenv==20.27.0 # via tox wcmatch==10.0 # via diff --git a/.hatch/requirements-test.txt b/.hatch/requirements-test.txt index edfd96b29a..a14759b25b 100644 --- a/.hatch/requirements-test.txt +++ b/.hatch/requirements-test.txt @@ -1,11 +1,11 @@ # # This file is autogenerated by hatch-pip-compile with Python 3.12 # -# [constraints] .hatch/requirements.txt (SHA256: f892a9714607641735b83f480e2c234b2ab8e1dffd2d59ad4188c887c06b24de) +# [constraints] .hatch/requirements.txt (SHA256: 12cb2eff6268d97a3d9d63d3ec5d670c6f13a571befda8d279cb7ce6ab9f5bb5) # # - appdirs==1.4.4 # - azure-core==1.31.0 -# - azure-identity==1.18.0 +# - azure-identity==1.19.0 # - azure-keyvault-certificates==4.8.0 # - azure-keyvault-keys==4.9.0 # - azure-keyvault-secrets==4.8.0 @@ -19,15 +19,15 @@ # - azure-mgmt-storage==21.2.1 # - azure-storage-blob==12.23.1 # - azure-storage-file-datalake==12.17.0 -# - azure-storage-file-share==12.18.0 +# - azure-storage-file-share==12.19.0 # - chevron==0.14.0 # - cryptography==43.0.1 # - fqdn==1.5.1 # - psycopg[binary]==3.1.19 -# - pulumi-azure-native==2.64.3 -# - pulumi-azuread==5.53.4 +# - pulumi-azure-native==2.66.0 +# - pulumi-azuread==6.0.0 # - pulumi-random==4.16.6 -# - pulumi==3.135.1 +# - pulumi==3.136.1 # - pydantic==2.9.2 # - pyjwt[crypto]==2.9.0 # - pytz==2024.2 @@ -36,7 +36,7 @@ # - simple-acme-dns==3.1.0 # - typer==0.12.5 # - websocket-client==1.8.0 -# - coverage==7.6.1 +# - coverage==7.6.3 # - freezegun==1.5.1 # - pytest-mock==3.14.0 # - pytest==8.3.3 @@ -87,7 +87,7 @@ azure-core==1.31.0 # azure-storage-file-datalake # azure-storage-file-share # msrest -azure-identity==1.18.0 +azure-identity==1.19.0 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -155,7 +155,7 @@ azure-storage-file-datalake==12.17.0 # via # -c .hatch/requirements.txt # hatch.envs.test -azure-storage-file-share==12.18.0 +azure-storage-file-share==12.19.0 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -168,7 +168,7 @@ cffi==1.17.1 # via # -c .hatch/requirements.txt # cryptography -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via # -c .hatch/requirements.txt # requests @@ -180,7 +180,7 @@ click==8.1.7 # via # -c .hatch/requirements.txt # typer -coverage==7.6.1 +coverage==7.6.3 # via hatch.envs.test cryptography==43.0.1 # via @@ -195,7 +195,7 @@ cryptography==43.0.1 # msal # pyjwt # pyopenssl -debugpy==1.8.6 +debugpy==1.8.7 # via # -c .hatch/requirements.txt # pulumi @@ -213,7 +213,7 @@ fqdn==1.5.1 # hatch.envs.test freezegun==1.5.1 # via hatch.envs.test -grpcio==1.60.2 +grpcio==1.66.2 # via # -c .hatch/requirements.txt # pulumi @@ -223,7 +223,7 @@ idna==3.10 # requests iniconfig==2.0.0 # via pytest -isodate==0.6.1 +isodate==0.7.2 # via # -c .hatch/requirements.txt # azure-keyvault-certificates @@ -295,18 +295,18 @@ psycopg-binary==3.1.19 # via # -c .hatch/requirements.txt # psycopg 
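The `[constraints]` header records a digest of `.hatch/requirements.txt`, so it changes whenever the default-environment pins change. A sketch of recomputing such a digest, assuming it is a plain SHA-256 of the constraints file contents (the real tool may normalise the file before hashing):

```python
# Sketch: recompute a constraints digest like the one in the lockfile header.
# Assumption: a plain SHA-256 over the raw file bytes; hatch-pip-compile may
# normalise comments or line endings before hashing.
import hashlib
from pathlib import Path


def constraints_digest(path: Path) -> str:
    return hashlib.sha256(path.read_bytes()).hexdigest()


# print(constraints_digest(Path(".hatch/requirements.txt")))
```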
-pulumi==3.135.1 +pulumi==3.136.1 # via # -c .hatch/requirements.txt # hatch.envs.test # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.64.3 +pulumi-azure-native==2.66.0 # via # -c .hatch/requirements.txt # hatch.envs.test -pulumi-azuread==5.53.4 +pulumi-azuread==6.0.0 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -402,7 +402,6 @@ six==1.16.0 # via # -c .hatch/requirements.txt # azure-core - # isodate # pulumi # python-dateutil typer==0.12.5 diff --git a/.hatch/requirements.txt b/.hatch/requirements.txt index 79631de17d..eb2f71c0e8 100644 --- a/.hatch/requirements.txt +++ b/.hatch/requirements.txt @@ -3,7 +3,7 @@ # # - appdirs==1.4.4 # - azure-core==1.31.0 -# - azure-identity==1.18.0 +# - azure-identity==1.19.0 # - azure-keyvault-certificates==4.8.0 # - azure-keyvault-keys==4.9.0 # - azure-keyvault-secrets==4.8.0 @@ -17,15 +17,15 @@ # - azure-mgmt-storage==21.2.1 # - azure-storage-blob==12.23.1 # - azure-storage-file-datalake==12.17.0 -# - azure-storage-file-share==12.18.0 +# - azure-storage-file-share==12.19.0 # - chevron==0.14.0 # - cryptography==43.0.1 # - fqdn==1.5.1 # - psycopg[binary]==3.1.19 -# - pulumi-azure-native==2.64.3 -# - pulumi-azuread==5.53.4 +# - pulumi-azure-native==2.66.0 +# - pulumi-azuread==6.0.0 # - pulumi-random==4.16.6 -# - pulumi==3.135.1 +# - pulumi==3.136.1 # - pydantic==2.9.2 # - pyjwt[crypto]==2.9.0 # - pytz==2024.2 @@ -68,7 +68,7 @@ azure-core==1.31.0 # azure-storage-file-datalake # azure-storage-file-share # msrest -azure-identity==1.18.0 +azure-identity==1.19.0 # via hatch.envs.default azure-keyvault-certificates==4.8.0 # via hatch.envs.default @@ -108,7 +108,7 @@ azure-storage-blob==12.23.1 # azure-storage-file-datalake azure-storage-file-datalake==12.17.0 # via hatch.envs.default -azure-storage-file-share==12.18.0 +azure-storage-file-share==12.19.0 # via hatch.envs.default certifi==2024.8.30 # via @@ -116,7 +116,7 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests chevron==0.14.0 # via hatch.envs.default @@ -134,7 +134,7 @@ cryptography==43.0.1 # msal # pyjwt # pyopenssl -debugpy==1.8.6 +debugpy==1.8.7 # via pulumi dill==0.3.9 # via pulumi @@ -142,11 +142,11 @@ dnspython==2.6.1 # via simple-acme-dns fqdn==1.5.1 # via hatch.envs.default -grpcio==1.60.2 +grpcio==1.66.2 # via pulumi idna==3.10 # via requests -isodate==0.6.1 +isodate==0.7.2 # via # azure-keyvault-certificates # azure-keyvault-keys @@ -192,15 +192,15 @@ psycopg==3.1.19 # via hatch.envs.default psycopg-binary==3.1.19 # via psycopg -pulumi==3.135.1 +pulumi==3.136.1 # via # hatch.envs.default # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.64.3 +pulumi-azure-native==2.66.0 # via hatch.envs.default -pulumi-azuread==5.53.4 +pulumi-azuread==6.0.0 # via hatch.envs.default pulumi-random==4.16.6 # via hatch.envs.default @@ -257,7 +257,6 @@ simple-acme-dns==3.1.0 six==1.16.0 # via # azure-core - # isodate # pulumi typer==0.12.5 # via hatch.envs.default diff --git a/.lychee.toml b/.lychee.toml index 7902f32ad0..2265ab5386 100644 --- a/.lychee.toml +++ b/.lychee.toml @@ -90,6 +90,7 @@ glob_ignore_case = false # Exclude URLs and mail addresses from checking (supports regex). 
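The exclusion entries that follow are regular expressions, and lychee skips any URL matched by one of them. The small Python sketch below only illustrates that filtering behaviour (it is not lychee code), using two patterns from this configuration:

```python
# Sketch: URLs matching any exclusion regex are skipped by the link checker.
# Mirrors the behaviour of the .lychee.toml entries; this is not lychee itself.
import re

EXCLUDE = [r"code\.visualstudio\.com", r"doi\.org"]


def should_check(url: str) -> bool:
    return not any(re.search(pattern, url) for pattern in EXCLUDE)


print(should_check("https://code.visualstudio.com/docs"))  # False: excluded (403 to crawlers)
print(should_check("https://example.net/page"))             # True
```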
# exclude = [ '.*\.github.com\.*' ] exclude = [ + 'code\.visualstudio\.com', # 403 'doi\.org', # 403 'entra.microsoft\.com', # Requires authentication (403) 'example\.org', # domain used for examples only diff --git a/data_safe_haven/commands/config.py b/data_safe_haven/commands/config.py index 830cda32ee..a774868516 100644 --- a/data_safe_haven/commands/config.py +++ b/data_safe_haven/commands/config.py @@ -19,11 +19,9 @@ DataSafeHavenAzureStorageError, DataSafeHavenConfigError, DataSafeHavenError, - DataSafeHavenPulumiError, DataSafeHavenTypeError, ) from data_safe_haven.external.api.azure_sdk import AzureSdk -from data_safe_haven.infrastructure import SREProjectManager from data_safe_haven.logging import get_logger from data_safe_haven.serialisers import ContextBase @@ -97,31 +95,12 @@ def available() -> None: logger.info(f"No configurations found for context '{context.name}'.") raise typer.Exit(0) + config_names = [blob.removeprefix("sre-").removesuffix(".yaml") for blob in blobs] pulumi_config = DSHPulumiConfig.from_remote(context) - sre_status = {} - for blob in blobs: - sre_config = SREConfig.from_remote_by_name( - context, blob.removeprefix("sre-").removesuffix(".yaml") - ) - stack = SREProjectManager( - context=context, - config=sre_config, - pulumi_config=pulumi_config, - create_project=True, - ) - - try: - sre_status[sre_config.name] = ( - "No output values" not in stack.run_pulumi_command("stack output") - ) - except DataSafeHavenPulumiError as exc: - logger.error( - f"Failed to run Pulumi command querying stack outputs for SRE '{sre_config.name}'." - ) - raise typer.Exit(1) from exc + deployed = pulumi_config.project_names headers = ["SRE Name", "Deployed"] - rows = [[name, "x" if deployed else ""] for name, deployed in sre_status.items()] + rows = [[name, "x" if name in deployed else ""] for name in config_names] console.print(f"Available SRE configurations for context '{context.name}':") console.tabulate(headers, rows) @@ -215,7 +194,11 @@ def upload( else: logger.critical(f"Configuration file '{file}' not found.") raise typer.Exit(1) - config = SREConfig.from_yaml(config_yaml) + try: + config = SREConfig.from_yaml(config_yaml) + except DataSafeHavenTypeError as exc: + logger.error("Check for missing or incorrect fields in the configuration.") + raise typer.Exit(1) from exc # Present diff to user if (not force) and SREConfig.remote_exists(context, filename=config.filename): diff --git a/data_safe_haven/config/config_sections.py b/data_safe_haven/config/config_sections.py index 252c94e7d0..35b9570a7e 100644 --- a/data_safe_haven/config/config_sections.py +++ b/data_safe_haven/config/config_sections.py @@ -10,6 +10,7 @@ from data_safe_haven.types import ( AzureLocation, AzurePremiumFileShareSize, + AzureServiceTag, AzureVmSku, DatabaseSystem, EmailAddress, @@ -58,7 +59,7 @@ class ConfigSectionSRE(BaseModel, validate_assignment=True): databases: UniqueList[DatabaseSystem] = [] data_provider_ip_addresses: list[IpAddress] = [] remote_desktop: ConfigSubsectionRemoteDesktopOpts - research_user_ip_addresses: list[IpAddress] = [] + research_user_ip_addresses: list[IpAddress] | AzureServiceTag = [] storage_quota_gb: ConfigSubsectionStorageQuotaGB software_packages: SoftwarePackageCategory = SoftwarePackageCategory.NONE timezone: TimeZone = "Etc/UTC" @@ -67,7 +68,7 @@ class ConfigSectionSRE(BaseModel, validate_assignment=True): @field_validator( "admin_ip_addresses", "data_provider_ip_addresses", - "research_user_ip_addresses", + # "research_user_ip_addresses", mode="after", ) 
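# NOTE (illustrative sketch, not part of the diff): the rewritten `dsh config
# available` command above no longer builds an SREProjectManager per SRE or
# runs `pulumi stack output`; deployment status comes straight from
# `pulumi_config.project_names`. Condensed, with hypothetical values:
blobs = ["sre-emerald.yaml", "sre-jade.yaml", "sre-olive.yaml"]
deployed = {"emerald"}  # stands in for pulumi_config.project_names
config_names = [blob.removeprefix("sre-").removesuffix(".yaml") for blob in blobs]
rows = [[name, "x" if name in deployed else ""] for name in config_names]
# rows == [["emerald", "x"], ["jade", ""], ["olive", ""]]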
@classmethod @@ -78,3 +79,16 @@ def ensure_non_overlapping(cls, v: list[IpAddress]) -> list[IpAddress]: msg = "IP addresses must not overlap." raise ValueError(msg) return v + + @field_validator( + "research_user_ip_addresses", + mode="after", + ) + @classmethod + def ensure_non_overlapping_or_tag( + cls, v: list[IpAddress] | AzureServiceTag + ) -> list[IpAddress] | AzureServiceTag: + if isinstance(v, list): + return cls.ensure_non_overlapping(v) + else: + return v diff --git a/data_safe_haven/config/sre_config.py b/data_safe_haven/config/sre_config.py index f4ee5ed6c9..9fba89e12f 100644 --- a/data_safe_haven/config/sre_config.py +++ b/data_safe_haven/config/sre_config.py @@ -98,7 +98,10 @@ def template(cls: type[Self], tier: int | None = None) -> SREConfig: allow_copy=remote_desktop_allow_copy, allow_paste=remote_desktop_allow_paste, ), - research_user_ip_addresses=["List of IP addresses belonging to users"], + research_user_ip_addresses=[ + "List of IP addresses belonging to users", + "You can also use the tag 'Internet' instead of a list", + ], software_packages=software_packages, storage_quota_gb=ConfigSubsectionStorageQuotaGB.model_construct( home="Total size in GiB across all home directories [minimum: 100].", # type: ignore diff --git a/data_safe_haven/infrastructure/programs/sre/networking.py b/data_safe_haven/infrastructure/programs/sre/networking.py index 42e1345c2d..e6c308f587 100644 --- a/data_safe_haven/infrastructure/programs/sre/networking.py +++ b/data_safe_haven/infrastructure/programs/sre/networking.py @@ -12,7 +12,7 @@ get_id_from_vnet, get_name_from_vnet, ) -from data_safe_haven.types import NetworkingPriorities, Ports +from data_safe_haven.types import AzureServiceTag, NetworkingPriorities, Ports class SRENetworkingProps: @@ -31,7 +31,7 @@ def __init__( shm_subscription_id: Input[str], shm_zone_name: Input[str], sre_name: Input[str], - user_public_ip_ranges: Input[list[str]], + user_public_ip_ranges: Input[list[str]] | AzureServiceTag, ) -> None: # Other variables self.dns_private_zones = dns_private_zones @@ -68,6 +68,13 @@ def __init__( child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self)) child_tags = {"component": "networking"} | (tags if tags else {}) + if isinstance(props.user_public_ip_ranges, list): + user_public_ip_ranges = props.user_public_ip_ranges + user_service_tag = None + else: + user_public_ip_ranges = None + user_service_tag = props.user_public_ip_ranges + # Define route table route_table = network.RouteTable( f"{self._name}_route_table", @@ -125,7 +132,8 @@ def __init__( name="AllowUsersInternetInbound", priority=NetworkingPriorities.AUTHORISED_EXTERNAL_USER_IPS, protocol=network.SecurityRuleProtocol.TCP, - source_address_prefixes=props.user_public_ip_ranges, + source_address_prefix=user_service_tag, + source_address_prefixes=user_public_ip_ranges, source_port_range="*", ), network.SecurityRuleArgs( diff --git a/data_safe_haven/logging/plain_file_handler.py b/data_safe_haven/logging/plain_file_handler.py index c41d0e5ffc..ffce4a551c 100644 --- a/data_safe_haven/logging/plain_file_handler.py +++ b/data_safe_haven/logging/plain_file_handler.py @@ -16,16 +16,23 @@ def __init__(self, *args: Any, **kwargs: Any): super().__init__(*args, **kwargs) @staticmethod - def strip_formatting(input_string: str) -> str: + def strip_rich_formatting(input_string: str) -> str: """Strip console markup formatting from a string""" text = Text.from_markup(input_string) text.spans = [] return str(text) + @staticmethod + def strip_ansi_escapes(input_string: 
str) -> str: + """Strip ANSI escape sequences from a string""" + text = Text.from_ansi(input_string) + text.spans = [] + return str(text) + def emit(self, record: logging.LogRecord) -> None: """Emit a record without formatting""" if isinstance(record.msg, Text): # Convert rich.text.Text objects to strings record.msg = str(record.msg) - record.msg = self.strip_formatting(record.msg) + record.msg = self.strip_ansi_escapes(self.strip_rich_formatting(record.msg)) super().emit(record) diff --git a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/gitea.desktop b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/gitea.desktop deleted file mode 100644 index 184609d263..0000000000 --- a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/gitea.desktop +++ /dev/null @@ -1,6 +0,0 @@ -[Desktop Entry] -Version=1.0 -Type=Link -Name=Gitea -Comment= -Icon=/usr/local/share/icons/gitea.png diff --git a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/hedgedoc.desktop b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/hedgedoc.desktop deleted file mode 100644 index 6a1c2b68c0..0000000000 --- a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/hedgedoc.desktop +++ /dev/null @@ -1,6 +0,0 @@ -[Desktop Entry] -Version=1.0 -Type=Link -Name=HedgeDoc -Comment= -Icon=/usr/local/share/icons/hedgedoc.png diff --git a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/input.desktop b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/input.desktop index 97e64b5b95..9d94e38319 100644 --- a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/input.desktop +++ b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/input.desktop @@ -4,4 +4,4 @@ Type=Link Name=input Comment= Icon=drive-removable-media -URL=/data +URL=/mnt/input diff --git a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/output.desktop b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/output.desktop index 4dc474784a..e34950528e 100644 --- a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/output.desktop +++ b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/output.desktop @@ -4,4 +4,4 @@ Type=Link Name=output Comment= Icon=drive-removable-media -URL=/output +URL=/mnt/output diff --git a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/shared.desktop b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/shared.desktop index 3e4e97fde7..27552b271d 100644 --- a/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/shared.desktop +++ b/data_safe_haven/resources/workspace/ansible/files/etc/skel/Desktop/shared.desktop @@ -4,4 +4,4 @@ Type=Link Name=shared Comment= Icon=drive-removable-media -URL=/shared +URL=/mnt/shared diff --git a/data_safe_haven/serialisers/yaml_serialisable_model.py b/data_safe_haven/serialisers/yaml_serialisable_model.py index 3423260fd5..1ab5a031f8 100644 --- a/data_safe_haven/serialisers/yaml_serialisable_model.py +++ b/data_safe_haven/serialisers/yaml_serialisable_model.py @@ -54,7 +54,7 @@ def from_yaml(cls: type[T], settings_yaml: str) -> T: ) for error in exc.errors(): logger.error( - f"[red]{'.'.join(map(str, error.get('loc', [])))}: {error.get('input', '')}[/] - {error.get('msg', '')}" + f"{error.get('msg', '')}: [red]{'.'.join(map(str, error.get('loc', [])))}.[/] Original input: [red]{error.get('input', '')}[/]" ) msg = f"{cls.config_type} configuration 
is invalid." raise DataSafeHavenTypeError(msg) from exc diff --git a/data_safe_haven/types/__init__.py b/data_safe_haven/types/__init__.py index 471fb56656..4f2f89b3be 100644 --- a/data_safe_haven/types/__init__.py +++ b/data_safe_haven/types/__init__.py @@ -15,6 +15,7 @@ from .enums import ( AzureDnsZoneNames, AzureSdkCredentialScope, + AzureServiceTag, DatabaseSystem, FirewallPriorities, ForbiddenDomains, @@ -29,6 +30,7 @@ "AzureDnsZoneNames", "AzureLocation", "AzurePremiumFileShareSize", + "AzureServiceTag", "AzureSdkCredentialScope", "AzureSubscriptionName", "AzureVmSku", diff --git a/data_safe_haven/types/enums.py b/data_safe_haven/types/enums.py index 170cbba4a0..35465f260e 100644 --- a/data_safe_haven/types/enums.py +++ b/data_safe_haven/types/enums.py @@ -26,6 +26,11 @@ class AzureSdkCredentialScope(str, Enum): KEY_VAULT = "https://vault.azure.net" +@verify(UNIQUE) +class AzureServiceTag(str, Enum): + INTERNET = "Internet" + + @verify(UNIQUE) class DatabaseSystem(str, Enum): MICROSOFT_SQL_SERVER = "mssql" diff --git a/data_safe_haven/validators/validators.py b/data_safe_haven/validators/validators.py index dd4458ec57..27507d26b4 100644 --- a/data_safe_haven/validators/validators.py +++ b/data_safe_haven/validators/validators.py @@ -124,7 +124,7 @@ def ip_address(ip_address: str) -> str: try: return str(ipaddress.ip_network(ip_address)) except Exception as exc: - msg = "Expected valid IPv4 address, for example '1.1.1.1'." + msg = "Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'." raise ValueError(msg) from exc diff --git a/docs/source/deployment/deploy_sre.md b/docs/source/deployment/deploy_sre.md index d4161ea579..2a1e5511a7 100644 --- a/docs/source/deployment/deploy_sre.md +++ b/docs/source/deployment/deploy_sre.md @@ -69,6 +69,48 @@ sre: :::: +:::{admonition} Supported Azure regions +:class: dropdown important + +Some of the SRE resources are not available in all Azure regions. + +- Workspace virtual machines use zone redundant storage managed disks which have [limited regional availability](https://learn.microsoft.com/en-us/azure/virtual-machines/disks-redundancy). +- Some shares mounted on workspace virtual machines require premium file shares which have [limited regional availability](https://learn.microsoft.com/en-us/azure/storage/files/redundancy-premium-file-shares). + +The regions which satisfy all requirements are, + +- Australia East +- Brazil South +- Canada Central +- Central India +- China North 3 +- East Asia +- East US +- East US 2 +- France Central +- Germany West Central +- Israel Central +- Italy North +- Japan East +- Korea Central +- North Europe +- Norway East +- Poland Central +- Qatar Central +- South Africa North +- South Central US +- Southeast Asia +- Sweden Central +- Switzerland North +- UAE North +- UK South +- US Gov Virginia +- West Europe +- West US 2 +- West US 3 + +::: + :::{hint} See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/) for a full list of valid Azure VM SKUs. 
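The new `AzureServiceTag` enum above, together with the union type on `research_user_ip_addresses` earlier in this diff, lets a configuration supply the literal tag `Internet` instead of a list of CIDR ranges. A minimal, self-contained sketch of how such a pydantic field behaves; the class and field names are simplified stand-ins, not the repository's actual classes:

```python
# Minimal sketch of accepting either explicit CIDR ranges or the "Internet"
# service tag on a pydantic field. Names are simplified stand-ins for
# AzureServiceTag / ConfigSectionSRE in the diff.
import ipaddress
from enum import Enum

from pydantic import BaseModel, field_validator


class ServiceTag(str, Enum):
    INTERNET = "Internet"


class UserNetworkConfig(BaseModel):
    user_ip_addresses: list[str] | ServiceTag = []

    @field_validator("user_ip_addresses", mode="after")
    @classmethod
    def ensure_non_overlapping_or_tag(
        cls, v: list[str] | ServiceTag
    ) -> list[str] | ServiceTag:
        if not isinstance(v, list):
            return v  # a service tag needs no overlap check
        networks = [ipaddress.ip_network(item) for item in v]
        if any(
            a.overlaps(b) for i, a in enumerate(networks) for b in networks[i + 1 :]
        ):
            msg = "IP addresses must not overlap."
            raise ValueError(msg)
        return v


print(UserNetworkConfig(user_ip_addresses="Internet").user_ip_addresses)  # ServiceTag.INTERNET
print(UserNetworkConfig(user_ip_addresses=["10.0.0.0/24", "10.0.1.0/24"]).user_ip_addresses)
```

In the networking component, the same union decides whether the NSG rule is built with `source_address_prefix` (a single service tag) or `source_address_prefixes` (a list of ranges), as the `AllowUsersInternetInbound` hunk above shows.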
::: diff --git a/docs/source/management/index.md b/docs/source/management/index.md index 931d5e003a..e4f8406ab7 100644 --- a/docs/source/management/index.md +++ b/docs/source/management/index.md @@ -87,6 +87,27 @@ If you have manually created a user and want to enable SSPR, do the following - **Email:** enter the user's email address here - Click the **{guilabel}`Save`** icon in the top panel +## Listing available SRE configurations and deployment status + +- Run the following if you want to check what SRE configurations are available in the current context, and whether those SREs are deployed + +```{code} shell +$ dsh config available +``` + +which will give output like the following + +```{code} shell +Available SRE configurations for context 'green': +┏━━━━━━━━━━━━━━┳━━━━━━━━━━┓ +┃ SRE Name ┃ Deployed ┃ +┡━━━━━━━━━━━━━━╇━━━━━━━━━━┩ +│ emerald │ x │ +│ jade │ │ +│ olive │ │ +└──────────────┴──────────┘ +``` + ## Removing a deployed Data Safe Haven - Run the following if you want to teardown a deployed SRE: diff --git a/pyproject.toml b/pyproject.toml index 1c7ac9839a..d444befc3d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ license = { text = "BSD-3-Clause" } dependencies = [ "appdirs==1.4.4", "azure-core==1.31.0", - "azure-identity==1.18.0", + "azure-identity==1.19.0", "azure-keyvault-certificates==4.8.0", "azure-keyvault-keys==4.9.0", "azure-keyvault-secrets==4.8.0", @@ -40,15 +40,15 @@ dependencies = [ "azure-mgmt-storage==21.2.1", "azure-storage-blob==12.23.1", "azure-storage-file-datalake==12.17.0", - "azure-storage-file-share==12.18.0", + "azure-storage-file-share==12.19.0", "chevron==0.14.0", "cryptography==43.0.1", "fqdn==1.5.1", "psycopg[binary]==3.1.19", # needed for installation on older MacOS versions - "pulumi-azure-native==2.64.3", - "pulumi-azuread==5.53.4", + "pulumi-azure-native==2.66.0", + "pulumi-azuread==6.0.0", "pulumi-random==4.16.6", - "pulumi==3.135.1", + "pulumi==3.136.1", "pydantic==2.9.2", "pyjwt[crypto]==2.9.0", "pytz==2024.2", @@ -70,14 +70,14 @@ docs = [ "myst-parser==4.0.0", "pydata-sphinx-theme==0.15.4", "sphinx-togglebutton==0.3.2", - "sphinx==8.0.2", + "sphinx==8.1.3", ] lint = [ "ansible-dev-tools==24.9.0", - "ansible==10.4.0", - "black==24.8.0", + "ansible==10.5.0", + "black==24.10.0", "mypy==1.11.2", - "pandas-stubs==2.2.2.240909", + "pandas-stubs==2.2.3.241009", "pydantic==2.9.2", "ruff==0.6.9", "types-appdirs==1.4.3.5", @@ -87,7 +87,7 @@ lint = [ "types-requests==2.32.0.20240914", ] test = [ - "coverage==7.6.1", + "coverage==7.6.3", "freezegun==1.5.1", "pytest-mock==3.14.0", "pytest==8.3.3", diff --git a/tests/commands/test_config_sre.py b/tests/commands/test_config_sre.py index 134705ae20..7460a908eb 100644 --- a/tests/commands/test_config_sre.py +++ b/tests/commands/test_config_sre.py @@ -331,3 +331,16 @@ def test_upload_invalid_config_force( context.storage_account_name, context.storage_container_name, ) + + def test_upload_missing_field( + self, runner, tmp_path, sre_config_yaml_missing_field + ): + config_file_path = tmp_path / "config.yaml" + with open(config_file_path, "w") as f: + f.write(sre_config_yaml_missing_field) + + result = runner.invoke(config_command_group, ["upload", str(config_file_path)]) + + assert result.exit_code == 1 + assert "validation errors" in result.stdout + assert "Check for missing" in result.stdout diff --git a/tests/config/test_config_sections.py b/tests/config/test_config_sections.py index 0363e41e38..6528b130fa 100644 --- a/tests/config/test_config_sections.py +++ 
b/tests/config/test_config_sections.py @@ -9,7 +9,11 @@ ConfigSubsectionRemoteDesktopOpts, ConfigSubsectionStorageQuotaGB, ) -from data_safe_haven.types import DatabaseSystem, SoftwarePackageCategory +from data_safe_haven.types import ( + AzureServiceTag, + DatabaseSystem, + SoftwarePackageCategory, +) class TestConfigSectionAzure: @@ -184,6 +188,24 @@ def test_ip_overlap_research_user(self): research_user_ip_addresses=["1.2.3.4", "1.2.3.4"], ) + def test_research_user_tag_internet( + self, + config_subsection_remote_desktop: ConfigSubsectionRemoteDesktopOpts, + config_subsection_storage_quota_gb: ConfigSubsectionStorageQuotaGB, + ): + sre_config = ConfigSectionSRE( + admin_email_address="admin@example.com", + remote_desktop=config_subsection_remote_desktop, + storage_quota_gb=config_subsection_storage_quota_gb, + research_user_ip_addresses="Internet", + ) + assert isinstance(sre_config.research_user_ip_addresses, AzureServiceTag) + assert sre_config.research_user_ip_addresses == "Internet" + + def test_research_user_tag_invalid(self): + with pytest.raises(ValueError, match="Input should be 'Internet'"): + ConfigSectionSRE(research_user_ip_addresses="Not a tag") + @pytest.mark.parametrize( "addresses", [ diff --git a/tests/conftest.py b/tests/conftest.py index 4626f1061e..5a8ce42847 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -534,6 +534,12 @@ def sre_config_yaml(request): return yaml.dump(yaml.safe_load(content)) +@fixture +def sre_config_yaml_missing_field(sre_config_yaml): + content = sre_config_yaml.replace("admin_email_address: admin@example.com", "") + return yaml.dump(yaml.safe_load(content)) + + @fixture def sre_project_manager( context_no_secrets, diff --git a/tests/logging/test_plain_file_handler.py b/tests/logging/test_plain_file_handler.py index a2bf60ad81..90f7c6ca70 100644 --- a/tests/logging/test_plain_file_handler.py +++ b/tests/logging/test_plain_file_handler.py @@ -1,6 +1,15 @@ +import pytest + from data_safe_haven.logging.plain_file_handler import PlainFileHandler class TestPlainFileHandler: - def test_strip_formatting(self): - assert PlainFileHandler.strip_formatting("[green]hello[/]") == "hello" + def test_strip_rich_formatting(self): + assert PlainFileHandler.strip_rich_formatting("[green]Hello[/]") == "Hello" + + @pytest.mark.parametrize("escape", ["\033", "\x1B", "\u001b", "\x1B"]) + def test_strip_ansi_escapes(self, escape): + assert ( + PlainFileHandler.strip_ansi_escapes(f"{escape}[31;1;4mHello{escape}[0m") + == "Hello" + ) diff --git a/tests/serialisers/test_yaml_serialisable_model.py b/tests/serialisers/test_yaml_serialisable_model.py index ecf4a2ca52..8850123bb1 100644 --- a/tests/serialisers/test_yaml_serialisable_model.py +++ b/tests/serialisers/test_yaml_serialisable_model.py @@ -68,15 +68,24 @@ def test_from_yaml_not_dict(self): ): ExampleYAMLSerialisableModel.from_yaml(yaml) - def test_from_yaml_validation_error(self): + def test_from_yaml_validation_errors(self, caplog): yaml = "\n".join( - ["string: 'abc'", "integer: 'not an integer'", "list_of_integers: [-1,0,1]"] + [ + "string: 'abc'", + "integer: 'not an integer'", + "list_of_integers: [-1,0,z,1]", + ] ) with raises( DataSafeHavenTypeError, match="Example configuration is invalid.", ): ExampleYAMLSerialisableModel.from_yaml(yaml) + assert "Input should be a valid integer" in caplog.text + assert "Original input: not an integer" in caplog.text + assert "unable to parse string as an integer" in caplog.text + assert "list_of_integers.2" in caplog.text + assert "Original input: z" in 
caplog.text def test_to_filepath(self, tmp_path, example_config_class): filepath = tmp_path / "test.yaml" diff --git a/tests/validators/test_validators.py b/tests/validators/test_validators.py index 1c38e551f8..18d2fd31b5 100644 --- a/tests/validators/test_validators.py +++ b/tests/validators/test_validators.py @@ -86,6 +86,36 @@ def test_fqdn_fail(self, fqdn): validators.fqdn(fqdn) +class TestValidateIpAddress: + @pytest.mark.parametrize( + "ip_address,output", + [ + ("127.0.0.1", "127.0.0.1/32"), + ("0.0.0.0/0", "0.0.0.0/0"), + ("192.168.171.1/32", "192.168.171.1/32"), + ], + ) + def test_ip_address(self, ip_address, output): + assert validators.ip_address(ip_address) == output + + @pytest.mark.parametrize( + "ip_address", + [ + "example.com", + "University of Life", + "999.999.999.999", + "0.0.0.0/-1", + "255.255.255.0/2", + ], + ) + def test_ip_address_fail(self, ip_address): + with pytest.raises( + ValueError, + match="Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'.", + ): + validators.ip_address(ip_address) + + class TestValidateSafeString: @pytest.mark.parametrize( "safe_string",
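The passing cases here follow directly from the `ip_address` validator shown earlier in this diff: it delegates to `ipaddress.ip_network`, which normalises a bare address to a `/32` host network and rejects malformed networks. A short, runnable restatement of that behaviour:

```python
# Sketch mirroring the validators.ip_address behaviour exercised above:
# ipaddress.ip_network() normalises "127.0.0.1" to "127.0.0.1/32" and raises
# for invalid networks such as "255.255.255.0/2" (host bits set).
import ipaddress


def ip_address(value: str) -> str:
    try:
        return str(ipaddress.ip_network(value))
    except Exception as exc:
        msg = "Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'."
        raise ValueError(msg) from exc


print(ip_address("127.0.0.1"))  # 127.0.0.1/32
print(ip_address("0.0.0.0/0"))  # 0.0.0.0/0
try:
    ip_address("255.255.255.0/2")
except ValueError as exc:
    print(exc)  # Expected valid IPv4 address, for example '1.1.1.1', or 'Internet'.
```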