diff --git a/.github/ISSUE_TEMPLATE/bug.yaml b/.github/ISSUE_TEMPLATE/bug.yaml index 702c0a95e..d3ed25e1a 100644 --- a/.github/ISSUE_TEMPLATE/bug.yaml +++ b/.github/ISSUE_TEMPLATE/bug.yaml @@ -45,7 +45,7 @@ body: If possible, please paste your charmcraft.yaml contents. This will be automatically formatted into code, so no need for backticks. - render: shell + render: yaml validations: required: true - type: textarea diff --git a/.github/renovate.json5 b/.github/renovate.json5 index b1019279d..a5d18da70 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -5,7 +5,9 @@ // Each ignore is probably connected with an ignore in pyproject.toml. // Ensure you change this and those simultaneously. "urllib3", + // Temporary until we remove Windows. https://github.com/canonical/charmcraft/issues/1810 "windows", // We'll update Windows versions manually. + "tox-gh", // As of 1.3.2 tox-gh doesn't support Windows 2019's python 3.7. ], labels: ["dependencies"], // For convenient searching in GitHub baseBranches: ["$default", "/^hotfix\\/.*/"], @@ -38,7 +40,11 @@ // Automerge patches, pin changes and digest changes. // Also groups these changes together. groupName: "bugfixes", - excludeDepPatterns: ["lint/.*", "types/.*"], + excludeDepPatterns: [ + "lint/.*", + "types/.*", + "pyright", // Pyright needs to be done separately. + ], matchUpdateTypes: ["patch", "pin", "digest"], prPriority: 3, // Patches should go first! automerge: true diff --git a/.github/workflows/publish-pypi.yaml b/.github/workflows/publish-pypi.yaml index 7640d89ea..6f34f4ddb 100644 --- a/.github/workflows/publish-pypi.yaml +++ b/.github/workflows/publish-pypi.yaml @@ -14,6 +14,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true - uses: actions/setup-python@v5 with: python-version: '3.12' diff --git a/.github/workflows/security-scan.yaml b/.github/workflows/security-scan.yaml new file mode 100644 index 000000000..d0254ca2d --- /dev/null +++ b/.github/workflows/security-scan.yaml @@ -0,0 +1,20 @@ +name: Security scan +on: + pull_request: + push: + branches: + - main + - hotfix/* + - work/secscan # For development + +jobs: + python-scans: + name: Scan Python project + uses: canonical/starflow/.github/workflows/scan-python.yaml@main + with: + packages: python-apt-dev + # 1. requirements-noble.txt can't build on jammy + # 2. Ignore requirements files in spread tests, as some of these intentionally + # contain vulnerable versions. + requirements-find-args: '! -name requirements-noble.txt ! 
-path "./tests/spread/*"' + osv-extra-args: '--config=source/osv-scanner.toml' diff --git a/.github/workflows/spread.yaml b/.github/workflows/spread.yaml index 454ea5b57..f1179c0d5 100644 --- a/.github/workflows/spread.yaml +++ b/.github/workflows/spread.yaml @@ -85,6 +85,7 @@ jobs: name: Run spread env: CHARMCRAFT_AUTH: ${{ secrets.CHARMCRAFT_AUTH }} + CHARMCRAFT_SINGLE_CHARM_AUTH: ${{ secrets.CHARMCRAFT_SINGLE_CHARM_AUTH }} CHARM_DEFAULT_NAME: gh-ci-charmcraft-charm BUNDLE_DEFAULT_NAME: gh-ci-charmcraft-bundle run: | diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 33f2ae18f..0f26affc6 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -45,7 +45,7 @@ jobs: run-tests: strategy: matrix: - os: [ubuntu-22.04, ubuntu-24.04, macos-12, macos-13, windows-2019, windows-2022] + os: [ubuntu-22.04, ubuntu-24.04, macos-13, macos-14-large, windows-2019, windows-2022] include: - os: windows-2019 python-version: | @@ -55,14 +55,16 @@ jobs: python-version: | 3.11 3.12 - - os: macos-12 - python_version: | + - os: macos-14-large + python-version: | 3.10 3.12 + 3.13 - os: macos-13 - python_version: | + python-version: | 3.10 3.12 + 3.13 runs-on: ${{ matrix.os }} steps: - name: Checkout code @@ -80,6 +82,14 @@ jobs: run: | sudo apt update sudo apt install -y python3-pip python3-setuptools python3-wheel python3-venv libapt-pkg-dev + pipx install poetry + # Jammy runners have too old a version of pip. + if [[ $(lsb_release --codename --short) == 'jammy' ]]; then + python3 -m pip install -U pip + fi + - name: Setup LXD + uses: canonical/setup-lxd@v0.1.1 + if: ${{ runner.os == 'Linux' }} - name: Install skopeo (mac) # This is only necessary for Linux until skopeo >= 1.11 is in repos. # Once we're running on Noble, we can get skopeo from apt. @@ -122,7 +132,7 @@ jobs: with: fetch-depth: 0 - name: Build snap - uses: snapcore/action-build@v1 + uses: canonical/action-build@v1 id: snapcraft - name: Upload snap artifact uses: actions/upload-artifact@v4 @@ -147,7 +157,7 @@ jobs: fi - name: Publish feature branch to edge/${{ steps.vars.outputs.branch }} if: ${{ env.SNAPCRAFT_STORE_CREDENTIALS != '' }} - uses: snapcore/action-publish@v1 + uses: canonical/action-publish@v1 env: SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAPCRAFT_STORE_CREDENTIALS }} with: @@ -288,7 +298,7 @@ jobs: macos-smoke-test: strategy: matrix: - os: [macos-12, macos-13] + os: [macos-13, macos-14-large] runs-on: ${{ matrix.os }} steps: # Installing and caching homebrew using the action should speed up subsequent CI: @@ -306,8 +316,9 @@ jobs: - name: Install Homebrew Bundler RubyGems if: steps.cache.outputs.cache-hit != 'true' run: brew install-bundler-gems - - name: Install Multipass + - name: Install dependencies with homebrew run: | + brew install libgit2@1.7 # For building pygit2 brew install multipass - name: Checkout code uses: actions/checkout@v4 @@ -318,7 +329,7 @@ jobs: cache: 'pip' - name: Build and install Charmcraft run: | - pipx install . + pip install -r requirements.txt . - name: Check for fully-configured multipass run: | while ! 
multipass version; do diff --git a/.github/workflows/tics.yaml b/.github/workflows/tics.yaml index dcc0580c3..ea5cdafa2 100644 --- a/.github/workflows/tics.yaml +++ b/.github/workflows/tics.yaml @@ -25,7 +25,8 @@ jobs: sudo apt-get install -y python3 python3-dev libapt-pkg-dev libyaml-dev echo "::endgroup::" echo "::group::pip install" - python -m pip install 'tox<5.0' tox-gh + python -m pip install 'tox<5.0' tox-gh poetry + pip install -U pip echo "::endgroup::" eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)" diff --git a/.gitignore b/.gitignore index dc4412ea1..6866d3f6f 100644 --- a/.gitignore +++ b/.gitignore @@ -141,6 +141,7 @@ dmypy.json *~ /charmcraft/_version.py /results/ +.*.*swp # Spread files .spread-reuse*.yaml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8490e5b03..336ab81b8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,12 +1,17 @@ ## Development environment +We recommend uv for setting up your local development environment: + +- [uv snap](https://snapcraft.io/astral-uv) +- [Official uv binary](https://docs.astral.sh/uv/getting-started/installation/) + To set up an initial development environment: git clone https://github.com/canonical/charmcraft.git cd charmcraft - virtualenv venv - . venv/bin/activate - pip install -r requirements-dev.txt -e . + uv venv + . .venv/bin/activate + uv pip install -r requirements-dev.txt -e . You will need a copy of `ruff` installed. On many Linux distributions, you can install ruff with: @@ -15,7 +20,7 @@ can install ruff with: Otherwise, you can install ruff in your virtual environment with: - pip install ruff + uv tool install ruff ## Developing against Charmcraft source @@ -31,7 +36,7 @@ When you're done, make sure you run the tests. You can do so with - pip install -r requirements-dev.txt + uv pip install -r requirements-dev.txt ./run_tests Contributions welcome! 
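For anyone reviewing the CONTRIBUTING.md changes above and wanting to try the new flow locally, the individual commands combine into the sequence below. This is only a sketch: it assumes the repository's existing `run_tests` script and a working `uv` install from either source listed in the updated document.

```sh
# One-time setup, per the revised CONTRIBUTING.md
git clone https://github.com/canonical/charmcraft.git
cd charmcraft
uv venv && . .venv/bin/activate
uv pip install -r requirements-dev.txt -e .
uv tool install ruff        # or install ruff from your distribution instead

# Before sending a pull request
./run_tests
```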
diff --git a/charmcraft/application/commands/__init__.py b/charmcraft/application/commands/__init__.py index 1ceff40b9..4364050af 100644 --- a/charmcraft/application/commands/__init__.py +++ b/charmcraft/application/commands/__init__.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Charmcraft commands.""" + import craft_application from charmcraft.application.commands.analyse import Analyse, Analyze @@ -109,7 +110,8 @@ def fill_command_groups(app: craft_application.Application) -> None: ], ) app.add_command_group( - "Extensions", [ExpandExtensionsCommand, ExtensionsCommand, ListExtensionsCommand] + "Extensions", + [ExpandExtensionsCommand, ExtensionsCommand, ListExtensionsCommand], ) app.add_command_group( "Other", diff --git a/charmcraft/application/commands/analyse.py b/charmcraft/application/commands/analyse.py index 8b160efe8..fefa706b7 100644 --- a/charmcraft/application/commands/analyse.py +++ b/charmcraft/application/commands/analyse.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Command for analysing a charm.""" + import argparse import json import pathlib diff --git a/charmcraft/application/commands/base.py b/charmcraft/application/commands/base.py index cb88c4fed..72fe8ae46 100644 --- a/charmcraft/application/commands/base.py +++ b/charmcraft/application/commands/base.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Base command for Charmcraft commands.""" + from __future__ import annotations import craft_application.commands diff --git a/charmcraft/application/commands/extensions.py b/charmcraft/application/commands/extensions.py index bb80e95df..5ec436565 100644 --- a/charmcraft/application/commands/extensions.py +++ b/charmcraft/application/commands/extensions.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Infrastructure for the 'extensions' command.""" + import argparse from textwrap import dedent diff --git a/charmcraft/application/commands/init.py b/charmcraft/application/commands/init.py index 35345041d..c6c39650c 100644 --- a/charmcraft/application/commands/init.py +++ b/charmcraft/application/commands/init.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Infrastructure for the 'init' command.""" + import argparse import os import pathlib @@ -39,6 +40,7 @@ "flask-framework": "init-flask-framework", "django-framework": "init-django-framework", "go-framework": "init-go-framework", + "fastapi-framework": "init-fastapi-framework", } DEFAULT_PROFILE = "simple" @@ -129,7 +131,9 @@ class InitCommand(base.CharmcraftCommand): def fill_parser(self, parser): """Specify command's specific parameters.""" - parser.add_argument("--name", help="The name of the charm; defaults to the directory name") + parser.add_argument( + "--name", help="The name of the charm; defaults to the directory name" + ) parser.add_argument( "--author", help="The charm author; defaults to the current user name per GECOS", diff --git a/charmcraft/application/commands/lifecycle.py b/charmcraft/application/commands/lifecycle.py index baae8a652..9e322bf81 100644 --- a/charmcraft/application/commands/lifecycle.py +++ b/charmcraft/application/commands/lifecycle.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """craft-application based lifecycle commands.""" + from __future__ import annotations import pathlib @@ -171,10 +172,46 @@ def run_managed(self, parsed_args: 
argparse.Namespace) -> bool: # Always use a runner on non-Linux platforms. # Craft-parts is not designed to work on non-posix platforms, and most # notably here, the bundle plugin doesn't work on Windows. - if sys.platform == "linux" and charmcraft_yaml and charmcraft_yaml.get("type") == "bundle": + if ( + sys.platform == "linux" + and charmcraft_yaml + and charmcraft_yaml.get("type") == "bundle" + ): return False + return super().run_managed(parsed_args) + def _update_charm_libs(self) -> None: + """Update charm libs attached to the project.""" + craft_cli.emit.progress( + "Checking that charmlibs match 'charmcraft.yaml' values" + ) + project = cast(models.CharmcraftProject, self._services.project) + libs_svc = cast(services.CharmLibsService, self._services.charm_libs) + installable_libs: list[models.CharmLib] = [] + for lib in project.charm_libs: + library_name = utils.QualifiedLibraryName.from_string(lib.lib) + if not libs_svc.get_local_version( + charm_name=library_name.charm_name, lib_name=library_name.lib_name + ): + installable_libs.append(lib) + if installable_libs: + store = cast(services.StoreService, self._services.store) + libraries_md = store.get_libraries_metadata(installable_libs) + with craft_cli.emit.progress_bar( + "Downloading charmlibs...", len(installable_libs) + ) as progress: + for library in libraries_md: + craft_cli.emit.debug(repr(library)) + lib_contents = store.get_library( + library.charm_name, + library_id=library.lib_id, + api=library.api, + patch=library.patch, + ) + libs_svc.write(lib_contents) + progress.advance(1) + def _run( self, parsed_args: argparse.Namespace, @@ -182,4 +219,9 @@ def _run( **kwargs: Any, # noqa: ANN401 (allow dynamic typing) ) -> None: self._validate_args(parsed_args) + + project = cast(models.CharmcraftProject, self._services.project) + if project.charm_libs: + self._update_charm_libs() + return super()._run(parsed_args, step_name, **kwargs) diff --git a/charmcraft/application/commands/remote.py b/charmcraft/application/commands/remote.py index 5ed3318be..ac606dec2 100644 --- a/charmcraft/application/commands/remote.py +++ b/charmcraft/application/commands/remote.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Build a charm remotely on Launchpad.""" + import argparse import os import pathlib @@ -63,7 +64,9 @@ class RemoteBuild(ExtensibleCommand): @override def _fill_parser(self, parser: argparse.ArgumentParser) -> None: - parser.add_argument("--recover", action="store_true", help="recover an interrupted build") + parser.add_argument( + "--recover", action="store_true", help="recover an interrupted build" + ) parser.add_argument( "--launchpad-accept-public-upload", action="store_true", @@ -121,7 +124,9 @@ def _run(self, parsed_args: argparse.Namespace, **kwargs: Any) -> int | None: # emit.progress(f"Recovering build {build_id}") builds = builder.resume_builds(build_id) else: - emit.progress("Starting new build. It may take a while to upload large projects.") + emit.progress( + "Starting new build. It may take a while to upload large projects." 
+ ) builds = builder.start_builds(project_dir) try: @@ -138,7 +143,9 @@ def _run(self, parsed_args: argparse.Namespace, **kwargs: Any) -> int | None: # builder.cleanup() return returncode - def _monitor_and_complete(self, build_id: str | None, builds: Collection[Build]) -> int: + def _monitor_and_complete( + self, build_id: str | None, builds: Collection[Build] + ) -> int: builder = self._services.remote_build emit.progress("Monitoring build") try: @@ -168,10 +175,14 @@ def _monitor_and_complete(self, build_id: str | None, builds: Collection[Build]) emit.progress("; ".join(progress_parts)) except TimeoutError: if build_id: - resume_command = f"{self._app.name} remote-build --recover --build-id={build_id}" + resume_command = ( + f"{self._app.name} remote-build --recover --build-id={build_id}" + ) else: resume_command = f"{self._app.name} remote-build --recover" - emit.message(f"Timed out waiting for build.\nTo resume, run {resume_command!r}") + emit.message( + f"Timed out waiting for build.\nTo resume, run {resume_command!r}" + ) return 75 # Temporary failure emit.progress(f"Fetching {len(builds)} build logs...") diff --git a/charmcraft/application/commands/store.py b/charmcraft/application/commands/store.py index ed30c4b32..cfa24f267 100644 --- a/charmcraft/application/commands/store.py +++ b/charmcraft/application/commands/store.py @@ -15,9 +15,11 @@ # For further info, check https://github.com/canonical/charmcraft """Commands related to Charmhub.""" + import argparse import collections import dataclasses +import datetime import os import pathlib import re @@ -31,7 +33,6 @@ from operator import attrgetter from typing import TYPE_CHECKING, Any -import craft_platforms import yaml from craft_application import util from craft_cli import ArgumentParsingError, emit @@ -69,7 +70,10 @@ class _ResourceType(typing.NamedTuple): EntityType = _EntityType() ResourceType = _ResourceType() # the list of valid attenuations to restrict login credentials -VALID_ATTENUATIONS = {getattr(attenuations, x) for x in dir(attenuations) if x.isupper()} +VALID_ATTENUATIONS = { + getattr(attenuations, x) for x in dir(attenuations) if x.isupper() +} +BUNDLE_REGISTRATION_REMOVAL_URL = "https://discourse.charmhub.io/t/15344" class LoginCommand(CharmcraftCommand): @@ -161,7 +165,9 @@ def run(self, parsed_args): """Run the command.""" # validate that restrictions are only used if credentials are exported restrictive_options = ["charm", "bundle", "channel", "permission", "ttl"] - if any(getattr(parsed_args, option) is not None for option in restrictive_options): + if any( + getattr(parsed_args, option) is not None for option in restrictive_options + ): if parsed_args.export is None: raise ArgumentParsingError( "The restrictive options 'bundle', 'channel', 'charm', 'permission' or 'ttl' " @@ -175,7 +181,9 @@ def run(self, parsed_args): "Explore the documentation to learn about valid permissions: " "https://juju.is/docs/sdk/remote-env-auth" ) - raise CraftError(f"Invalid permission: {invalid_text}.", details=details) + raise CraftError( + f"Invalid permission: {invalid_text}.", details=details + ) # restrictive options, mapping the names between what is used in Namespace (singular, # even if it ends up being a list) and the more natural ones used in the Store layer @@ -190,14 +198,20 @@ def run(self, parsed_args): kwargs[arg_name] = namespace_value packages = ( - utils.get_packages(charms=parsed_args.charm or [], bundles=parsed_args.bundle or []) + utils.get_packages( + charms=parsed_args.charm or [], 
bundles=parsed_args.bundle or [] + ) or None ) if parsed_args.export: - credentials = self._services.store.get_credentials(packages=packages, **kwargs) + credentials = self._services.store.get_credentials( + packages=packages, **kwargs + ) parsed_args.export.write_text(credentials) - emit.message(f"Login successful. Credentials exported to {str(parsed_args.export)!r}.") + emit.message( + f"Login successful. Credentials exported to {str(parsed_args.export)!r}." + ) else: self._services.store.login(packages=packages, **kwargs) username = self._services.store.get_account_info()["username"] @@ -245,7 +259,7 @@ class WhoamiCommand(CharmcraftCommand): ) format_option = True - def run(self, parsed_args): + def run(self, parsed_args: argparse.Namespace) -> None: """Run the command.""" try: macaroon_info = self._services.store.client.whoami() @@ -258,7 +272,7 @@ def run(self, parsed_args): return human_msgs = [] - prog_info = {"logged": True} + prog_info: dict[str, Any] = {"logged": True} human_msgs.append(f"name: {macaroon_info['account']['display-name']}") prog_info["name"] = macaroon_info["account"]["display-name"] @@ -274,20 +288,20 @@ def run(self, parsed_args): prog_info["permissions"] = permissions if packages := macaroon_info.get("packages"): - grouped = {} + grouped: dict[str, list[dict[str, str]]] = {} for package in packages: - grouped.setdefault(package.type, []).append(package) + grouped.setdefault(package["type"], []).append(package) for package_type, title in [("charm", "charms"), ("bundle", "bundles")]: if package_type in grouped: human_msgs.append(f"{title}:") pkg_info = [] for item in grouped[package_type]: - if item.name is not None: - human_msgs.append(f"- name: {item.name}") - pkg_info.append({"name": item.name}) - elif item.id is not None: - human_msgs.append(f"- id: {item.id}") - pkg_info.append({"id": item.id}) + if (name := item.get("name")) is not None: + human_msgs.append(f"- name: {name}") + pkg_info.append({"name": name}) + elif (pkg_id := item.get("id")) is not None: + human_msgs.append(f"- id: {pkg_id}") + pkg_info.append({"id": pkg_id}) prog_info[title] = pkg_info if channels := macaroon_info.get("channels"): @@ -340,7 +354,9 @@ def run(self, parsed_args): """Run the command.""" store = Store(env.get_store_config()) store.register_name(parsed_args.name, EntityType.charm) - emit.message(f"You are now the publisher of charm {parsed_args.name!r} in Charmhub.") + emit.message( + f"You are now the publisher of charm {parsed_args.name!r} in Charmhub." + ) class RegisterBundleNameCommand(CharmcraftCommand): @@ -349,7 +365,7 @@ class RegisterBundleNameCommand(CharmcraftCommand): name = "register-bundle" help_msg = "Register a bundle name in the Store" overview = textwrap.dedent( - """ + f""" Register a bundle name in the Store. Claim a name for your bundle in Charmhub. Once you have registered @@ -368,18 +384,37 @@ class RegisterBundleNameCommand(CharmcraftCommand): https://discourse.charmhub.io/c/charm Registration will take you through login if needed. + + \u001b[31mWARNING:\u001b[0m Charmhub will stop accepting new bundle registrations on 2024-11-01. 
+ For more information, see: + {BUNDLE_REGISTRATION_REMOVAL_URL} """ ) - def fill_parser(self, parser): + def fill_parser(self, parser: argparse.ArgumentParser): """Add own parameters to the general parser.""" parser.add_argument("name", help="The name to register in Charmhub") - def run(self, parsed_args): + def run(self, parsed_args: argparse.Namespace) -> int: """Run the command.""" + if datetime.date.today() >= datetime.date(2024, 11, 1): + emit.message( + "\u001b[31mERROR:\u001b[0m New bundle registration is discontinued as of 2024-11-01. For more " + f"information, see: {BUNDLE_REGISTRATION_REMOVAL_URL}" + ) + return 1 + emit.progress( + "\u001b[31mWARNING:\u001b[0m New bundle registration will stop working on 2024-11-01. For " + f"more information, see: {BUNDLE_REGISTRATION_REMOVAL_URL}", + permanent=True, + ) store = Store(env.get_store_config()) store.register_name(parsed_args.name, EntityType.bundle) - emit.message(f"You are now the publisher of bundle {parsed_args.name!r} in Charmhub.") + emit.message( + f"You are now the publisher of bundle {parsed_args.name!r} in Charmhub." + ) + # TODO(#1810): Replace this with os.EX_OK + return 0 class UnregisterNameCommand(CharmcraftCommand): @@ -587,7 +622,9 @@ def run(self, parsed_args): if not result.ok: if parsed_args.format: - errors = [{"code": err.code, "message": err.message} for err in result.errors] + errors = [ + {"code": err.code, "message": err.message} for err in result.errors + ] info = {"errors": errors} emit.message(cli.format_content(info, parsed_args.format)) else: @@ -598,7 +635,9 @@ def run(self, parsed_args): if parsed_args.release: # also release! - store.release(name, result.revision, parsed_args.release, parsed_args.resource) + store.release( + name, result.revision, parsed_args.release, parsed_args.resource + ) if parsed_args.format: info = {"revision": result.revision} @@ -611,7 +650,9 @@ def run(self, parsed_args): if parsed_args.resource: msg += " (attaching resources: {})" args.append( - ", ".join(f"{r.name!r} r{r.revision}" for r in parsed_args.resource) + ", ".join( + f"{r.name!r} r{r.revision}" for r in parsed_args.resource + ) ) emit.message(msg.format(*args)) return 0 @@ -677,7 +718,9 @@ def run(self, parsed_args): "status": item.status, } if item.errors: - prog_info["errors"] = [{"message": e.message, "code": e.code} for e in item.errors] + prog_info["errors"] = [ + {"message": e.message, "code": e.code} for e in item.errors + ] prog_data.append(prog_info) if parsed_args.format: @@ -781,7 +824,9 @@ def run(self, parsed_args): args = [parsed_args.revision, parsed_args.name, ", ".join(parsed_args.channel)] if parsed_args.resource: msg += " (attaching resources: {})" - args.append(", ".join(f"{r.name!r} r{r.revision}" for r in parsed_args.resource)) + args.append( + ", ".join(f"{r.name!r} r{r.revision}" for r in parsed_args.resource) + ) emit.message(msg.format(*args)) @@ -832,8 +877,12 @@ def run(self, parsed_args: "Namespace") -> None: raise CraftError("promote-bundle must be run on a bundle.") # Check snapcraft for equiv logic - from_channel = charmcraft.store.models.ChannelData.from_str(parsed_args.from_channel) - to_channel = charmcraft.store.models.ChannelData.from_str(parsed_args.to_channel) + from_channel = charmcraft.store.models.ChannelData.from_str( + parsed_args.from_channel + ) + to_channel = charmcraft.store.models.ChannelData.from_str( + parsed_args.to_channel + ) if to_channel == from_channel: raise CraftError("Cannot promote from a channel to the same channel.") @@ -868,7 +917,9 @@ def 
run(self, parsed_args: "Namespace") -> None: emit.debug(f"Creating bundle file in {str(output_bundle)}") output_bundle /= "bundle.yaml" else: - raise CraftError(f"Not a valid bundle output path: {str(output_bundle)}") + raise CraftError( + f"Not a valid bundle output path: {str(output_bundle)}" + ) elif output_bundle is not None: if not output_bundle.suffix: output_bundle /= "bundle.yaml" @@ -876,14 +927,18 @@ def run(self, parsed_args: "Namespace") -> None: if parent.exists(): if os.access(parent, os.W_OK): break - raise CraftError(f"Bundle output directory not writable: {str(parent)}") + raise CraftError( + f"Bundle output directory not writable: {str(parent)}" + ) # Load bundle # TODO: When this goes into the StoreService, use the service's own project_path bundle_path = self._services.package.project_dir / "bundle.yaml" bundle_config = utils.load_yaml(bundle_path) if bundle_config is None: - raise CraftError(f"Missing or invalid main bundle file: {(str(bundle_path))}") + raise CraftError( + f"Missing or invalid main bundle file: {(str(bundle_path))}" + ) bundle_name = bundle_config.get("name") if not bundle_name: raise CraftError( @@ -905,7 +960,9 @@ def run(self, parsed_args: "Namespace") -> None: ) store = Store(env.get_store_config()) - registered_names: list[Entity] = store.list_registered_names(include_collaborations=True) + registered_names: list[Entity] = store.list_registered_names( + include_collaborations=True + ) name_map = {entity.name: entity for entity in registered_names} if bundle_name not in name_map: @@ -915,7 +972,9 @@ def run(self, parsed_args: "Namespace") -> None: ) elif name_map[bundle_name].entity_type != EntityType.bundle: entity_type = name_map[bundle_name].entity_type - raise CraftError(f"Store Entity {bundle_name} is a {entity_type}, not a bundle.") + raise CraftError( + f"Store Entity {bundle_name} is a {entity_type}, not a bundle." + ) invalid_charms = [] non_charms = [] @@ -932,7 +991,9 @@ def run(self, parsed_args: "Namespace") -> None: ) if non_charms: non_charm_list = utils.humanize_list(non_charms, "and") - raise CraftError(f"The following store entities are not charms: {non_charm_list}") + raise CraftError( + f"The following store entities are not charms: {non_charm_list}" + ) # Revision in the source channel channel_map, *_ = store.list_releases(bundle_name) @@ -942,7 +1003,9 @@ def run(self, parsed_args: "Namespace") -> None: bundle_revision = release.revision break if bundle_revision is None: - raise CraftError("Cannot find a bundle released to the given source channel.") + raise CraftError( + "Cannot find a bundle released to the given source channel." + ) # Get source channel charms charm_revisions: dict[str, int] = {} @@ -1007,7 +1070,9 @@ def run(self, parsed_args: "Namespace") -> None: # Upload the bundle and release it to the target channel. store.upload(bundle_name, zipname) - release_info = store.release(bundle_name, bundle_revision, [parsed_args.to_channel], []) + release_info = store.release( + bundle_name, bundle_revision, [parsed_args.to_channel], [] + ) # There should only be one revision. 
release_info = release_info["released"][0] @@ -1048,10 +1113,14 @@ def run(self, parsed_args): """Run the command.""" store = Store(env.get_store_config()) revision = None # revision None will actually close the channel - channels = [parsed_args.channel] # the API accepts multiple channels, we have only one + channels = [ + parsed_args.channel + ] # the API accepts multiple channels, we have only one resources = [] # not really used when closing channels store.release(parsed_args.name, revision, channels, resources) - emit.message(f"Closed {parsed_args.channel!r} channel for {parsed_args.name!r}.") + emit.message( + f"Closed {parsed_args.channel!r} channel for {parsed_args.name!r}." + ) class StatusCommand(CharmcraftCommand): @@ -1162,7 +1231,8 @@ def run(self, parsed_args): # bases are shown alphabetically ordered sorted_bases = sorted( - releases_by_base, key=lambda b: b and (b.name, b.channel, b.architecture) + releases_by_base, + key=lambda b: b and (b.name, b.channel, b.architecture), ) for base in sorted_bases: releases_by_channel = releases_by_base[base] @@ -1178,7 +1248,9 @@ def run(self, parsed_args): } prog_releases_info = [] - prog_channels_info.append({"base": prog_base, "releases": prog_releases_info}) + prog_channels_info.append( + {"base": prog_base, "releases": prog_releases_info} + ) release_shown_for_this_track_base = False @@ -1190,7 +1262,11 @@ def run(self, parsed_args): "↑" if release_shown_for_this_track_base else "-" ) prog_version = prog_revno = prog_resources = None - prog_status = "tracking" if release_shown_for_this_track_base else "closed" + prog_status = ( + "tracking" + if release_shown_for_this_track_base + else "closed" + ) else: release_shown_for_this_track_base = True revno = prog_revno = release.revision @@ -1248,7 +1324,9 @@ def run(self, parsed_args): if parsed_args.format: emit.message(cli.format_content(prog_data, parsed_args.format)) else: - table = tabulate(human_data, headers=headers, tablefmt="plain", numalign="left") + table = tabulate( + human_data, headers=headers, tablefmt="plain", numalign="left" + ) for line in table.splitlines(): emit.message(line) @@ -1293,13 +1371,17 @@ def run(self, parsed_args): lib_name = parsed_args.name valid_all_chars = set(string.ascii_lowercase + string.digits + "_") valid_first_char = string.ascii_lowercase - if set(lib_name) - valid_all_chars or not lib_name or lib_name[0] not in valid_first_char: + if ( + set(lib_name) - valid_all_chars + or not lib_name + or lib_name[0] not in valid_first_char + ): raise CraftError( "Invalid library name. Must only use lowercase alphanumeric " "characters and underscore, starting with alpha." ) - charm_name = self._services.project.name or utils.get_name_from_metadata() + charm_name = self._services.project.name or utils.get_name_from_yaml() if charm_name is None: raise CraftError( "Cannot find a valid charm name in charm definition. " @@ -1329,7 +1411,9 @@ def run(self, parsed_args): lib_path.parent.mkdir(parents=True, exist_ok=True) lib_path.write_text(template.render(context)) except OSError as exc: - raise CraftError(f"Error writing the library in {str(lib_path)!r}: {exc!r}.") + raise CraftError( + f"Error writing the library in {str(lib_path)!r}: {exc!r}." 
+ ) if parsed_args.format: info = {"library_id": lib_id} @@ -1376,7 +1460,7 @@ def fill_parser(self, parser): def run(self, parsed_args): """Run the command.""" - charm_name = self._services.project.name or utils.get_name_from_metadata() + charm_name = self._services.project.name or utils.get_name_from_yaml() if charm_name is None: raise CraftError( "Cannot find a valid charm name in charm definition. " @@ -1397,7 +1481,9 @@ def run(self, parsed_args): else: local_libs_data = utils.get_libs_from_tree(charm_name) found_libs = [lib_data.full_name for lib_data in local_libs_data] - (charmlib_path,) = {lib_data.path.parent.parent for lib_data in local_libs_data} + (charmlib_path,) = { + lib_data.path.parent.parent for lib_data in local_libs_data + } emit.debug(f"Libraries found under {str(charmlib_path)!r}: {found_libs}") # check if something needs to be done @@ -1426,7 +1512,9 @@ def run(self, parsed_args): elif tip.patch == lib_data.patch: # the store has same version numbers than local if tip.content_hash == lib_data.content_hash: - error_message = f"Library {lib_data.full_name} is already updated in Charmhub." + error_message = ( + f"Library {lib_data.full_name} is already updated in Charmhub." + ) else: # but shouldn't as hash is different! error_message = ( @@ -1541,7 +1629,11 @@ def run(self, parsed_args: argparse.Namespace) -> None: to_query = [] for lib in local_libs_data: if lib.lib_id is None: - item = {"charm_name": lib.charm_name, "lib_name": lib.lib_name, "api": lib.api} + item = { + "charm_name": lib.charm_name, + "lib_name": lib.lib_name, + "api": lib.api, + } else: item = {"lib_id": lib.lib_id, "api": lib.api} to_query.append(item) @@ -1554,7 +1646,10 @@ def run(self, parsed_args: argparse.Namespace) -> None: # fix any missing lib id using the Store info if lib_data.lib_id is None: for tip in libs_tips.values(): - if lib_data.charm_name == tip.charm_name and lib_data.lib_name == tip.lib_name: + if ( + lib_data.charm_name == tip.charm_name + and lib_data.lib_name == tip.lib_name + ): lib_data = dataclasses.replace(lib_data, lib_id=tip.lib_id) break @@ -1568,9 +1663,7 @@ def run(self, parsed_args: argparse.Namespace) -> None: pass elif tip.patch < lib_data.patch: # the store has a lower version numbers than local - error_message = ( - f"Library {lib_data.full_name} has local changes, cannot be updated." - ) + error_message = f"Library {lib_data.full_name} has local changes, cannot be updated." else: # same versions locally and in the store if tip.content_hash == lib_data.content_hash: @@ -1579,15 +1672,15 @@ def run(self, parsed_args: argparse.Namespace) -> None: f"version {tip.api:d}.{tip.patch:d}." ) else: - error_message = ( - f"Library {lib_data.full_name} has local changes, cannot be updated." - ) + error_message = f"Library {lib_data.full_name} has local changes, cannot be updated." 
analysis.append((lib_data, error_message)) full_lib_data = [] for lib_data, error_message in analysis: if error_message is None: - downloaded = store.get_library(lib_data.charm_name, lib_data.lib_id, lib_data.api) + downloaded = store.get_library( + lib_data.charm_name, lib_data.lib_id, lib_data.api + ) if lib_data.content is None: # locally new lib_data.path.parent.mkdir(parents=True, exist_ok=True) @@ -1696,7 +1789,8 @@ def run(self, parsed_args: argparse.Namespace) -> None: emit.trace(f"Library metadata retrieved: {libs_metadata}") local_libs = { - f"{lib.charm_name}.{lib.lib_name}": lib for lib in utils.get_libs_from_tree() + f"{lib.charm_name}.{lib.lib_name}": lib + for lib in utils.get_libs_from_tree() } emit.trace(f"Local libraries: {local_libs}") @@ -1712,7 +1806,9 @@ def run(self, parsed_args: argparse.Namespace) -> None: permanent=True, ) continue - lib_name = utils.get_lib_module_name(lib_md.charm_name, lib_md.lib_name, lib_md.api) + lib_name = utils.get_lib_module_name( + lib_md.charm_name, lib_md.lib_name, lib_md.api + ) emit.progress(f"Downloading {lib_name}") lib = store.get_library( charm_name=lib_md.charm_name, @@ -1725,7 +1821,9 @@ def run(self, parsed_args: argparse.Namespace) -> None: f"Store returned no content for '{lib.charm_name}.{lib.lib_name}'" ) downloaded_libs += 1 - lib_path = utils.get_lib_path(lib_md.charm_name, lib_md.lib_name, lib_md.api) + lib_path = utils.get_lib_path( + lib_md.charm_name, lib_md.lib_name, lib_md.api + ) lib_path.parent.mkdir(exist_ok=True, parents=True) lib_path.write_text(lib.content) emit.debug(f"Downloaded {lib_name}.") @@ -1775,7 +1873,7 @@ def run(self, parsed_args): if parsed_args.name: charm_name = parsed_args.name else: - charm_name = utils.get_name_from_metadata() + charm_name = utils.get_name_from_yaml() if charm_name is None: raise CraftError( "Can't access name in 'metadata.yaml' file. 
The 'list-lib' command must " @@ -1789,7 +1887,9 @@ def run(self, parsed_args): libs_tips = store.get_libraries_tips(to_query) # order it - libs_data = sorted(libs_tips.values(), key=attrgetter("lib_name", "api", "patch")) + libs_data = sorted( + libs_tips.values(), key=attrgetter("lib_name", "api", "patch") + ) if parsed_args.format: info = [ @@ -1836,7 +1936,9 @@ class ListResourcesCommand(CharmcraftCommand): def fill_parser(self, parser): """Add own parameters to the general parser.""" super().fill_parser(parser) - parser.add_argument("charm_name", metavar="charm-name", help="The name of the charm") + parser.add_argument( + "charm_name", metavar="charm-name", help="The name of the charm" + ) def run(self, parsed_args): """Run the command.""" @@ -1867,8 +1969,12 @@ def run(self, parsed_args): data = [] for revision, items in sorted(by_revision.items(), reverse=True): initial, *rest = sorted(items, key=attrgetter("name")) - data.append((revision, initial.name, initial.resource_type, initial.optional)) - data.extend(("", item.name, item.resource_type, item.optional) for item in rest) + data.append( + (revision, initial.name, initial.resource_type, initial.optional) + ) + data.extend( + ("", item.name, item.resource_type, item.optional) for item in rest + ) table = tabulate(data, headers=headers, tablefmt="plain", numalign="left") for line in table.splitlines(): @@ -1908,7 +2014,9 @@ def fill_parser(self, parser): metavar="charm-name", help="The charm name to associate the resource", ) - parser.add_argument("resource_name", metavar="resource-name", help="The resource name") + parser.add_argument( + "resource_name", metavar="resource-name", help="The resource name" + ) group = parser.add_mutually_exclusive_group(required=True) group.add_argument( "--filepath", @@ -1943,7 +2051,9 @@ def run(self, parsed_args: argparse.Namespace) -> int: architectures = ["all"] if parsed_args.filepath: - emit.progress(f"Uploading resource directly from file {str(parsed_args.filepath)!r}.") + emit.progress( + f"Uploading resource directly from file {str(parsed_args.filepath)!r}." + ) bases = [{"name": "all", "channel": "all", "architectures": architectures}] result = store.upload_resource( parsed_args.charm_name, @@ -2011,11 +2121,13 @@ def run(self, parsed_args: argparse.Namespace) -> int: dest_password=credentials.password, ) - image_arch = [ - craft_platforms.DebianArchitecture.from_machine(arch).value + image_arch = { + image_service.convert_go_arch_to_charm_arch(arch).value for arch in image_metadata.architectures + } + bases = [ + {"name": "all", "channel": "all", "architectures": sorted(image_arch)} ] - bases = [{"name": "all", "channel": "all", "architectures": image_arch}] # all is green, get the blob to upload to Charmhub content = store.get_oci_image_blob( @@ -2035,7 +2147,9 @@ def run(self, parsed_args: argparse.Namespace) -> int: bases=bases, ) else: - raise CraftError("Either a file path or an image descriptor must be passed.") + raise CraftError( + "Either a file path or an image descriptor must be passed." 
+ ) if result.ok: if parsed_args.format: @@ -2051,7 +2165,8 @@ def run(self, parsed_args: argparse.Namespace) -> int: if parsed_args.format: info = { "errors": [ - {"code": error.code, "message": error.message} for error in result.errors + {"code": error.code, "message": error.message} + for error in result.errors ] } emit.message(cli.format_content(info, parsed_args.format)) @@ -2098,7 +2213,9 @@ def fill_parser(self, parser) -> None: metavar="charm-name", help="The name of the charm", ) - parser.add_argument("resource_name", metavar="resource-name", help="The resource name") + parser.add_argument( + "resource_name", metavar="resource-name", help="The resource name" + ) parser.add_argument( "--revision", dest="revisions", @@ -2147,16 +2264,22 @@ def write_output( if update.updated_at is not None else "--" ), - "Architectures": ",".join(_get_architectures_from_bases(update.bases)), + "Architectures": ",".join( + _get_architectures_from_bases(update.bases) + ), } - for update in sorted(updates, key=lambda rev: int(rev.revision), reverse=True) + for update in sorted( + updates, key=lambda rev: int(rev.revision), reverse=True + ) ] else: updates_dicts = [ { "revision": update.revision, "updated_at": ( - update.updated_at.isoformat() if update.updated_at is not None else None + update.updated_at.isoformat() + if update.updated_at is not None + else None ), "architectures": _get_architectures_from_bases(update.bases), } @@ -2194,12 +2317,16 @@ def fill_parser(self, parser): metavar="charm-name", help="The charm name to associate the resource", ) - parser.add_argument("resource_name", metavar="resource-name", help="The resource name") + parser.add_argument( + "resource_name", metavar="resource-name", help="The resource name" + ) def run(self, parsed_args): """Run the command.""" store = Store(env.get_store_config()) - result = store.list_resource_revisions(parsed_args.charm_name, parsed_args.resource_name) + result = store.list_resource_revisions( + parsed_args.charm_name, parsed_args.resource_name + ) if parsed_args.format: info = [ @@ -2231,12 +2358,16 @@ def run(self, parsed_args): for item in result ] - table = tabulate(data, headers=headers, tablefmt="plain", colalign=custom_alignment) + table = tabulate( + data, headers=headers, tablefmt="plain", colalign=custom_alignment + ) for line in table.splitlines(): emit.message(line) -def _get_architectures_from_bases(bases: typing.Iterable[ResponseCharmResourceBase]) -> list[str]: +def _get_architectures_from_bases( + bases: typing.Iterable[ResponseCharmResourceBase], +) -> list[str]: """Get a list of all architectures from an iterable of resource bases.""" architectures = set() for base in bases: diff --git a/charmcraft/application/commands/test.py b/charmcraft/application/commands/test.py index 5ab6bd7e2..c68b59211 100644 --- a/charmcraft/application/commands/test.py +++ b/charmcraft/application/commands/test.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Infrastructure for the 'test' command.""" + import argparse import os import subprocess diff --git a/charmcraft/application/commands/version.py b/charmcraft/application/commands/version.py index 7df00934a..d1aaff39b 100644 --- a/charmcraft/application/commands/version.py +++ b/charmcraft/application/commands/version.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Version command.""" + import argparse import json diff --git a/charmcraft/application/main.py b/charmcraft/application/main.py index 
21cd48f98..c6875c135 100644 --- a/charmcraft/application/main.py +++ b/charmcraft/application/main.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """New entrypoint for charmcraft.""" + from __future__ import annotations import pathlib @@ -23,12 +24,11 @@ import craft_application import craft_cli from craft_application import util -from craft_parts.plugins import plugins +from craft_parts.plugins.plugins import PluginType from overrides import override -from charmcraft import extensions, models, preprocess, services +from charmcraft import extensions, models, parts, preprocess, services from charmcraft.application import commands -from charmcraft.parts import BundlePlugin, CharmPlugin, ReactivePlugin from charmcraft.services import CharmcraftServiceFactory GENERAL_SUMMARY = """ @@ -98,7 +98,6 @@ def _check_deprecated(self, yaml_data: dict[str, Any]) -> None: def _extra_yaml_transform( self, yaml_data: dict[str, Any], *, build_on: str, build_for: str | None ) -> dict[str, Any]: - # Extensions get applied on as close as possible to what the user provided. yaml_data = extensions.apply_extensions(self.project_dir, yaml_data.copy()) @@ -119,6 +118,10 @@ def _configure_services(self, provider_name: str | None) -> None: project_dir=self.project_dir, build_plan=self._build_plan, ) + self.services.update_kwargs( + "charm_libs", + project_dir=self.project_dir, + ) def configure(self, global_args: dict[str, Any]) -> None: """Configure the application using any global arguments.""" @@ -131,14 +134,16 @@ def _get_dispatcher(self) -> craft_cli.Dispatcher: return self._dispatcher @override - def _get_app_plugins(self) -> dict[str, plugins.PluginType]: - return {"charm": CharmPlugin, "bundle": BundlePlugin, "reactive": ReactivePlugin} + def _get_app_plugins(self) -> dict[str, PluginType]: + return parts.get_app_plugins() @override def _pre_run(self, dispatcher: craft_cli.Dispatcher) -> None: """Override to get project_dir early.""" super()._pre_run(dispatcher) - if not self.is_managed() and not getattr(dispatcher.parsed_args(), "project_dir", None): + if not self.is_managed() and not getattr( + dispatcher.parsed_args(), "project_dir", None + ): self.project_dir = pathlib.Path().expanduser().resolve() def run_managed(self, platform: str | None, build_for: str | None) -> None: @@ -159,10 +164,14 @@ def run_managed(self, platform: str | None, build_for: str | None) -> None: output_path.mkdir(parents=True, exist_ok=True) package_file_path = self._work_dir / ".charmcraft_output_packages.txt" if package_file_path.exists(): - package_files = package_file_path.read_text().splitlines(keepends=False) + package_files = package_file_path.read_text().splitlines( + keepends=False + ) package_file_path.unlink(missing_ok=True) for filename in package_files: - shutil.move(str(self._work_dir / filename), output_path / filename) + shutil.move( + str(self._work_dir / filename), output_path / filename + ) def _expand_environment(self, yaml_data: dict[str, Any], build_for: str) -> None: """Perform expansion of project environment variables. diff --git a/charmcraft/bases.py b/charmcraft/bases.py deleted file mode 100644 index c6af77e39..000000000 --- a/charmcraft/bases.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2021 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# For further info, check https://github.com/canonical/charmcraft - -"""Logic dealing with bases.""" - -from craft_application import util - -from charmcraft.models.charmcraft import Base -from charmcraft.utils import get_os_platform - - -def get_host_as_base() -> Base: - """Get host OS represented as Base. - - The host OS name is translated to lower-case for consistency. - - :returns: Base configuration matching host. - """ - os_platform = get_os_platform() - host_arch = util.get_host_architecture() - name = os_platform.system.lower() - channel = os_platform.release - - return Base(name=name, channel=channel, architectures=[host_arch]) - - -def check_if_base_matches_host(base: Base) -> tuple[bool, str | None]: - """Check if given base matches the host. - - :param base: Base to check. - - :returns: Tuple of bool indicating whether it is a match, with optional - reason if not a match. - """ - host_base = get_host_as_base() - host_arch = host_base.architectures[0] - - if host_base.name != base.name: - return False, f"name {base.name!r} does not match host {host_base.name!r}" - - # For Ubuntu, MacOS and Windows, use the full version. - # For other OSes, use the major version only. - - if host_base.name in ("ubuntu", "darwin", "windows"): - host_channel = host_base.channel - else: - host_channel = host_base.channel.split(".")[0] - if host_channel != base.channel: - return ( - False, - f"channel {base.channel!r} does not match host {host_base.channel!r}", - ) - - if host_arch not in base.architectures: - return ( - False, - f"host architecture {host_arch!r} not in base architectures {base.architectures!r}", - ) - - return True, None diff --git a/charmcraft/charm_builder.py b/charmcraft/charm_builder.py index 40a0bb762..de8ca1585 100644 --- a/charmcraft/charm_builder.py +++ b/charmcraft/charm_builder.py @@ -44,7 +44,9 @@ MINIMUM_PIP_VERSION = (24, 1) KNOWN_GOOD_PIP_URL = "https://files.pythonhosted.org/packages/c0/d0/9641dc7b05877874c6418f8034ddefc809495e65caa14d38c7551cd114bb/pip-24.1.1.tar.gz" -KNOWN_GOOD_PIP_HASH = "sha256:5aa64f65e1952733ee0a9a9b1f52496ebdb3f3077cc46f80a16d983b58d1180a" +KNOWN_GOOD_PIP_HASH = ( + "sha256:5aa64f65e1952733ee0a9a9b1f52496ebdb3f3077cc46f80a16d983b58d1180a" +) def relativise(src, dst): @@ -111,7 +113,9 @@ def create_symlink(self, src_path, dest_path): dest_path.symlink_to(relative_link) else: rel_path = src_path.relative_to(self.builddir) - print(f"Ignoring symlink because targets outside the project: {str(rel_path)!r}") + print( + f"Ignoring symlink because targets outside the project: {str(rel_path)!r}" + ) @instrum.Timer("Handling generic paths") def handle_generic_paths(self): @@ -125,7 +129,9 @@ def handle_generic_paths(self): """ print("Linking in generic paths") - for basedir, dirnames, filenames in os.walk(str(self.builddir), followlinks=False): + for basedir, dirnames, filenames in os.walk( + str(self.builddir), followlinks=False + ): abs_basedir = pathlib.Path(basedir) rel_basedir = abs_basedir.relative_to(self.builddir) @@ -204,10 +210,14 @@ def handle_dispatcher(self, linked_entrypoint): if node.resolve() == linked_entrypoint: 
current_hooks_to_replace.append(node) node.unlink() - print(f"Replacing existing hook {node.name!r} as it's a symlink to the entrypoint") + print( + f"Replacing existing hook {node.name!r} as it's a symlink to the entrypoint" + ) # include the mandatory ones and those we need to replace - hooknames = const.MANDATORY_HOOK_NAMES | {x.name for x in current_hooks_to_replace} + hooknames = const.MANDATORY_HOOK_NAMES | { + x.name for x in current_hooks_to_replace + } for hookname in hooknames: print(f"Creating the {hookname!r} hook script pointing to dispatch") dest_hook = dest_hookpath / hookname @@ -243,6 +253,7 @@ def _install_dependencies(self, staging_venv_dir: pathlib.Path): [ pip_cmd, "install", + "--force-reinstall", f"pip@{KNOWN_GOOD_PIP_URL}", ] ) @@ -271,9 +282,13 @@ def _install_dependencies(self, staging_venv_dir: pathlib.Path): ) if self.python_packages: print("Installing Python pre-dependencies from source.") - _process_run([pip_cmd, "install", "--no-binary=:all:", *self.python_packages]) + _process_run( + [pip_cmd, "install", "--no-binary=:all:", *self.python_packages] + ) if self.requirement_paths or self.charmlib_deps: - print("Installing packages from requirements files and charm lib dependencies.") + print( + "Installing packages from requirements files and charm lib dependencies." + ) requirements_packages = get_requirements_file_package_names( *self.requirement_paths ) @@ -410,7 +425,9 @@ def _process_run(cmd: list[str]) -> None: retcode = proc.wait() if retcode: - raise RuntimeError(f"Subprocess command {cmd} execution failed with retcode {retcode}") + raise RuntimeError( + f"Subprocess command {cmd} execution failed with retcode {retcode}" + ) def _parse_arguments() -> argparse.Namespace: diff --git a/charmcraft/cmdbase.py b/charmcraft/cmdbase.py deleted file mode 100644 index b7035e688..000000000 --- a/charmcraft/cmdbase.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2020-2022 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# For further info, check https://github.com/canonical/charmcraft - -"""Infrastructure for common base commands functionality.""" - -import json - -import craft_cli -from craft_cli import ArgumentParsingError, CraftError - -JSON_FORMAT = "json" -FORMAT_HELP_STR = "Produce the result in the specified format (currently only 'json')" - - -class BaseCommand(craft_cli.BaseCommand): - """Subclass this to create a new command. - - The following default attribute is provided beyond craft-cli ones: - - The subclass must be declared in the corresponding section of main.COMMAND_GROUPS. - - If the command may produce the result in a programmatic-friendly format, it - should call the 'include_format_option' method to properly affect the parser and - then emit only one message with the result of the 'format_content' method. 
- """ - - def format_content(self, fmt, content): - """Format the content.""" - if fmt == JSON_FORMAT: - return json.dumps(content, indent=4) - raise ValueError("Specified format not supported.") - - def include_format_option(self, parser): - """Add the 'format' option to this parser.""" - parser.add_argument( - "--format", - choices=[JSON_FORMAT], - help=FORMAT_HELP_STR, - ) - - def _check_config(self, config_file: bool = False, bases: bool = False) -> None: - """Check if valid config contents exists. - - - config_file: if True, check if a valid "charmcraft.yaml" file exists. - - bases: if True, check if a valid "bases" in "charmcraft.yaml" exists. - - :raises ArgumentParsingError: if 'charmcraft.yaml' file is missing. - :raises CraftError: if any specified config are missing or invalid. - """ - if config_file and not self.config.project.config_provided: - raise ArgumentParsingError( - "The specified command needs a valid 'charmcraft.yaml' configuration file (in " - "the current directory or where specified with --project-dir option); see " - "the reference: https://discourse.charmhub.io/t/charmcraft-configuration/4138" - ) - - if bases and self.config.bases is None: - raise CraftError( - "The specified command needs a valid 'bases' in 'charmcraft.yaml' configuration " - "file (in the current directory or where specified with --project-dir option)." - ) diff --git a/charmcraft/const.py b/charmcraft/const.py index b2b7ff878..175748e77 100644 --- a/charmcraft/const.py +++ b/charmcraft/const.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Constants used in charmcraft.""" + import enum from typing import Literal @@ -60,6 +61,7 @@ "ubuntu@22.04", "ubuntu@23.10", "ubuntu@24.04", + "ubuntu@24.10", "centos@7", "almalinux@9", ] @@ -73,7 +75,6 @@ BaseName("ubuntu", "18.04"), BaseName("ubuntu", "20.04"), BaseName("ubuntu", "22.04"), - BaseName("ubuntu", "23.10"), BaseName("ubuntu", "24.04"), BaseName("ubuntu", "devel"), BaseName("centos", "7"), @@ -98,6 +99,20 @@ def __str__(self) -> str: return str(self.value) +GO_ARCH_TO_CHARM_ARCH = { + "arm": "armhf", + "ppc64le": "ppc64el", +} +"""Mapping to convert go architectures to charm architecture strings. + +Architectures not included here are the same in GOARCH as charm arch names. + +go architectures are also used as OCI image architectures. +Reference 1: https://github.com/opencontainers/image-spec/blob/main/config.md#properties +Reference 2: https://go.dev/doc/install/source#environment +""" + + SUPPORTED_ARCHITECTURES = frozenset(arch.value for arch in CharmArch) diff --git a/charmcraft/dispatch.py b/charmcraft/dispatch.py new file mode 100644 index 000000000..cd02457b9 --- /dev/null +++ b/charmcraft/dispatch.py @@ -0,0 +1,67 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Module for helping with creating a dispatch script for charms.""" + +import pathlib + +import craft_cli + +from charmcraft import const + +DISPATCH_SCRIPT_TEMPLATE = """\ +#!/bin/sh +dispatch_path="$(dirname $(realpath $0))" +venv_bin_path="${{dispatch_path}}/venv/bin" +python_path="${{venv_bin_path}}/python" +if [ ! -e "${{python_path}}" ]; then + mkdir -p "{{venv_bin_path}}" + ln -s $(which python3) "${{python_path}}" +fi + +# Add charm lib and source directories to PYTHONPATH so the charm can import +# libraries and its own modules as expected. +export PYTHONPATH="${{dispatch_path}}/lib:${{dispatch_path}}/src" + +# Add the charm's lib and usr/lib directories to LD_LIBRARY_PATH, allowing +# staged packages to be discovered by the dynamic linker. +export LD_LIBRARY_PATH="${{dispatch_path}}/usr/lib:${{dispatch_path}}/lib:${{dispatch_path}}/usr/lib/$(uname -m)-linux-gnu" + +exec "${{python_path}}" "${{dispatch_path}}/{entrypoint}" +""" + + +def create_dispatch( + *, prime_dir: pathlib.Path, entrypoint: str = "src/charm.py" +) -> bool: + """If the charm has no hooks or dispatch, create a dispatch file. + + :param prime_dir: the prime directory to inspect and create the file in. + :returns: True if the file was created, False otherwise. + """ + dispatch_path = prime_dir / const.DISPATCH_FILENAME + hooks_path = prime_dir / const.HOOKS_DIRNAME + + if hooks_path.is_dir() or dispatch_path.is_file(): + return False + + if not (prime_dir / entrypoint).exists(): + return False + + craft_cli.emit.progress("Creating dispatch file") + dispatch_path.write_text(DISPATCH_SCRIPT_TEMPLATE.format(entrypoint=entrypoint)) + dispatch_path.chmod(mode=0o755) + + return True diff --git a/charmcraft/env.py b/charmcraft/env.py index df3f112b9..922308677 100644 --- a/charmcraft/env.py +++ b/charmcraft/env.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Charmcraft environment utilities.""" + import dataclasses import os import pathlib @@ -63,7 +64,9 @@ def is_charmcraft_running_from_snap() -> bool: def is_charmcraft_running_in_managed_mode() -> bool: """Check if charmcraft is running in a managed environment.""" - managed_flag = os.getenv(const.MANAGED_MODE_ENV_VAR, os.getenv("CRAFT_MANAGED_MODE", "n")) + managed_flag = os.getenv( + const.MANAGED_MODE_ENV_VAR, os.getenv("CRAFT_MANAGED_MODE", "n") + ) return strtobool(managed_flag) @@ -82,6 +85,12 @@ class CharmhubConfig: def get_store_config() -> CharmhubConfig: """Get the appropriate configuration for the store.""" api_url = os.getenv(const.STORE_API_ENV_VAR, DEFAULT_CHARMHUB_CONFIG.api_url) - storage_url = os.getenv(const.STORE_STORAGE_ENV_VAR, DEFAULT_CHARMHUB_CONFIG.storage_url) - registry_url = os.getenv(const.STORE_REGISTRY_ENV_VAR, DEFAULT_CHARMHUB_CONFIG.registry_url) - return CharmhubConfig(api_url=api_url, storage_url=storage_url, registry_url=registry_url) + storage_url = os.getenv( + const.STORE_STORAGE_ENV_VAR, DEFAULT_CHARMHUB_CONFIG.storage_url + ) + registry_url = os.getenv( + const.STORE_REGISTRY_ENV_VAR, DEFAULT_CHARMHUB_CONFIG.registry_url + ) + return CharmhubConfig( + api_url=api_url, storage_url=storage_url, registry_url=registry_url + ) diff --git a/charmcraft/errors.py b/charmcraft/errors.py index 6764a50e3..be1d545d1 100644 --- a/charmcraft/errors.py +++ b/charmcraft/errors.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Charmcraft error classes.""" + import io import pathlib import 
shlex @@ -81,7 +82,9 @@ class DuplicateCharmsError(CraftError): "Files can be seen with --verbosity=debug" ) - def __init__(self, charms: Mapping[str, Iterable[pathlib.Path]], source: bool = True): + def __init__( + self, charms: Mapping[str, Iterable[pathlib.Path]], source: bool = True + ): import charmcraft.utils charm_names = charmcraft.utils.humanize_list(charms.keys(), "and") @@ -103,7 +106,10 @@ def _format_details(charms: Mapping[str, Iterable[pathlib.Path]]) -> str: print(path_tree_line_format.format(name="CHARM", path="PATHS"), file=details) for charm, paths in charms.items(): path_iter = iter(paths) - print(path_tree_line_format.format(name=charm, path=next(path_iter)), file=details) + print( + path_tree_line_format.format(name=charm, path=next(path_iter)), + file=details, + ) for path in path_iter: print(path_tree_line_format.format(name="", path=path), file=details) return details.getvalue() diff --git a/charmcraft/extensions/__init__.py b/charmcraft/extensions/__init__.py index 05467123c..5ef75cca7 100644 --- a/charmcraft/extensions/__init__.py +++ b/charmcraft/extensions/__init__.py @@ -17,7 +17,12 @@ """Extension processor and related utilities.""" from charmcraft.extensions._utils import apply_extensions -from charmcraft.extensions.app import DjangoFramework, FlaskFramework, GoFramework +from charmcraft.extensions.app import ( + DjangoFramework, + FastAPIFramework, + FlaskFramework, + GoFramework, +) from charmcraft.extensions.extension import Extension from charmcraft.extensions.registry import ( get_extension_class, @@ -42,3 +47,4 @@ register("flask-framework", FlaskFramework) register("django-framework", DjangoFramework) register("go-framework", GoFramework) +register("fastapi-framework", FastAPIFramework) diff --git a/charmcraft/extensions/_utils.py b/charmcraft/extensions/_utils.py index 3a816ad2b..1090205fc 100644 --- a/charmcraft/extensions/_utils.py +++ b/charmcraft/extensions/_utils.py @@ -41,7 +41,9 @@ def apply_extensions(project_root: Path, yaml_data: dict[str, Any]) -> dict[str, # Process extensions in a consistent order for extension_name in sorted(declared_extensions): extension_class = get_extension_class(extension_name) - extension = extension_class(project_root=project_root, yaml_data=copy.deepcopy(yaml_data)) + extension = extension_class( + project_root=project_root, yaml_data=copy.deepcopy(yaml_data) + ) extension.validate(extension_name=extension_name) _apply_extension(yaml_data, extension) return yaml_data diff --git a/charmcraft/extensions/app.py b/charmcraft/extensions/app.py index b7c4bd3a6..f7a6eb46d 100644 --- a/charmcraft/extensions/app.py +++ b/charmcraft/extensions/app.py @@ -31,7 +31,7 @@ class _AppBase(Extension): {"lib": "traefik_k8s.ingress", "version": "2"}, {"lib": "observability_libs.juju_topology", "version": "0"}, {"lib": "grafana_k8s.grafana_dashboard", "version": "0"}, - {"lib": "loki_k8s.loki_push_api", "version": "0"}, + {"lib": "loki_k8s.loki_push_api", "version": "1"}, {"lib": "data_platform_libs.data_interfaces", "version": "0"}, {"lib": "prometheus_k8s.prometheus_scrape", "version": "0"}, {"lib": "redis_k8s.redis", "version": "0"}, @@ -80,7 +80,11 @@ def _check_input(self) -> None: f"the '{self.framework}-framework' extension is incompatible with " f"customized charm part" ) - incompatible_fields = {"devices", "extra-bindings", "storage"} & self.yaml_data.keys() + incompatible_fields = { + "devices", + "extra-bindings", + "storage", + } & self.yaml_data.keys() if incompatible_fields: raise ExtensionError( f"the 
'{self.framework}-framework' extension is incompatible with the provided " @@ -100,7 +104,9 @@ def _check_input(self) -> None: user_provided: dict[str, Any] = self._get_nested(self.yaml_data, merging) if not user_provided: continue - overlap = user_provided.keys() & self._get_nested(root_snippet, merging).keys() + overlap = ( + user_provided.keys() & self._get_nested(root_snippet, merging).keys() + ) if overlap: raise ExtensionError( f"overlapping keys {overlap} in {merging} of charmcraft.yaml " @@ -272,6 +278,12 @@ class DjangoFramework(_AppBase): }, } + @staticmethod + @override + def is_experimental(base: tuple[str, ...] | None) -> bool: # noqa: ARG004 + """Check if the extension is in an experimental state.""" + return False + class GoFramework(_AppBase): """Extension for 12-factor Go applications.""" @@ -314,3 +326,56 @@ def get_image_name(self) -> str: def get_container_name(self) -> str: """Return name of the container for the app image.""" return "app" + + +class FastAPIFramework(_AppBase): + """Extension for 12-factor FastAPI applications.""" + + framework = "fastapi" + options = { + "webserver-workers": { + "type": "int", + "default": 1, + "description": "Number of workers for uvicorn. Sets env variable WEB_CONCURRENCY. See https://www.uvicorn.org/#command-line-options.", + }, + "webserver-port": { + "type": "int", + "default": 8080, + "description": "Bind to a socket with this port. Default: 8000. Sets env variable UVICORN_PORT.", + }, + "webserver-log-level": { + "type": "string", + "default": "info", + "description": "Set the log level. Options: 'critical', 'error', 'warning', 'info', 'debug', 'trace'. Sets the env variable UVICORN_LOG_LEVEL.", + }, + "metrics-port": { + "type": "int", + "default": 8080, + "description": "Port where the prometheus metrics will be scraped.", + }, + "metrics-path": { + "type": "string", + "default": "/metrics", + "description": "Path where the prometheus metrics will be scraped.", + }, + "app-secret-key": { + "type": "string", + "description": "Long secret you can use for sessions, csrf or any other thing where you need a random secret shared by all units", + }, + } + + @staticmethod + @override + def get_supported_bases() -> list[tuple[str, str]]: + """Return supported bases.""" + return [("ubuntu", "24.04")] + + @override + def get_image_name(self) -> str: + """Return name of the app image.""" + return "app-image" + + @override + def get_container_name(self) -> str: + """Return name of the container for the app image.""" + return "app" diff --git a/charmcraft/extensions/extension.py b/charmcraft/extensions/extension.py index eabc570ee..ccb152b66 100644 --- a/charmcraft/extensions/extension.py +++ b/charmcraft/extensions/extension.py @@ -106,7 +106,9 @@ def validate(self, extension_name: str): ) invalid_parts = [ - p for p in self.get_parts_snippet() if not p.startswith(f"{extension_name}/") + p + for p in self.get_parts_snippet() + if not p.startswith(f"{extension_name}/") ] if invalid_parts: raise ValueError( @@ -134,7 +136,9 @@ def append_to_env(env_variable: str, paths: Sequence[str], separator: str = ":") return f"${{{env_variable}:+${env_variable}{separator}}}" + separator.join(paths) -def prepend_to_env(env_variable: str, paths: Sequence[str], separator: str = ":") -> str: +def prepend_to_env( + env_variable: str, paths: Sequence[str], separator: str = ":" +) -> str: """Return a string for env_variable with one of more paths prepended. :param env_variable: the variable to operate on. 
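
For context, once the module-level register("fastapi-framework", FastAPIFramework) call above has run, the new extension is reachable through the same public registry helpers re-exported from charmcraft.extensions. The following is a minimal sketch, not part of the change set itself, assuming a charmcraft build that includes this patch is importable; every name used (get_extension_class, get_supported_bases, options) appears in the diff above.

from charmcraft import extensions

fastapi_cls = extensions.get_extension_class("fastapi-framework")

# Static metadata of the new extension, as defined in FastAPIFramework above.
print(fastapi_cls.get_supported_bases())
# -> [('ubuntu', '24.04')]
print(sorted(fastapi_cls.options))
# -> ['app-secret-key', 'metrics-path', 'metrics-port',
#     'webserver-log-level', 'webserver-port', 'webserver-workers']
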
diff --git a/charmcraft/extensions/registry.py b/charmcraft/extensions/registry.py index 3afd564e9..e55016be5 100644 --- a/charmcraft/extensions/registry.py +++ b/charmcraft/extensions/registry.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Extension registry.""" + from typing import Any from charmcraft import errors diff --git a/charmcraft/jujuignore.py b/charmcraft/jujuignore.py index d715859b3..f2bed5823 100644 --- a/charmcraft/jujuignore.py +++ b/charmcraft/jujuignore.py @@ -232,6 +232,4 @@ def match(self, path: str, is_dir: bool) -> bool: /venv .jujuignore -""".split( - "\n" -) +""".split("\n") diff --git a/charmcraft/linters.py b/charmcraft/linters.py index d904659ea..40e639ca6 100644 --- a/charmcraft/linters.py +++ b/charmcraft/linters.py @@ -1,4 +1,4 @@ -# Copyright 2021-2022 Canonical Ltd. +# Copyright 2021-2024 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,15 @@ # For further info, check https://github.com/canonical/charmcraft """Analyze and lint charm structures and files.""" + import abc import ast import os import pathlib +import re import shlex +import subprocess +import sys import typing from collections.abc import Generator from typing import final @@ -244,7 +248,9 @@ def _check_operator(self, basedir: pathlib.Path) -> bool: def _check_reactive(self, basedir: pathlib.Path) -> bool: """Detect if the Reactive Framework is used.""" try: - metadata = CharmMetadataLegacy.from_yaml_file(basedir / const.METADATA_FILENAME) + metadata = CharmMetadataLegacy.from_yaml_file( + basedir / const.METADATA_FILENAME + ) except Exception: # file not found, corrupted, or mandatory "name" not present return False @@ -252,7 +258,9 @@ def _check_reactive(self, basedir: pathlib.Path) -> bool: wheelhouse_dir = basedir / "wheelhouse" if not wheelhouse_dir.exists(): return False - if not any(f.name.startswith("charms.reactive-") for f in wheelhouse_dir.iterdir()): + if not any( + f.name.startswith("charms.reactive-") for f in wheelhouse_dir.iterdir() + ): return False module_basename = metadata.name.replace("-", "_") @@ -432,9 +440,13 @@ def _config_options_check(config_file: pathlib.Path) -> list[str]: return warnings with config_file.open("rt", encoding="utf8") as fh: - options = content.get("options", {}) if (content := yaml.safe_load(fh)) else {} + options = ( + content.get("options", {}) if (content := yaml.safe_load(fh)) else {} + ) - if check := NamingConventions.check_naming_convention(options.keys(), "config-options"): + if check := NamingConventions.check_naming_convention( + options.keys(), "config-options" + ): warnings.append(check) return warnings @@ -464,7 +476,9 @@ def _actions_check(action_file: pathlib.Path) -> list[str]: for param in content.get(action_name, {}).get("params", []) ] - if check := NamingConventions.check_naming_convention(actions_params, "action params"): + if check := NamingConventions.check_naming_convention( + actions_params, "action params" + ): warnings.append(check) return warnings @@ -505,7 +519,9 @@ def run(self, basedir: pathlib.Path) -> str: """Run the proper verifications.""" entrypoint = get_entrypoint_from_dispatch(basedir) if entrypoint is None: - self.text = "Cannot find a proper 'dispatch' script pointing to an entrypoint." + self.text = ( + "Cannot find a proper 'dispatch' script pointing to an entrypoint." 
+ ) return self.Result.NONAPPLICABLE if not entrypoint.exists(): @@ -523,6 +539,101 @@ def run(self, basedir: pathlib.Path) -> str: return self.Result.OK +class OpsMainCall(Linter): + """Check that the entrypoint contains call to ops.main().""" + + name = "ops-main-call" + url = f"{BASE_DOCS_URL}#heading--ops-main-call" + text = "" + + def run(self, basedir: pathlib.Path) -> str: + """Check preconditions and validate there's an ops.main() call.""" + if Framework().run(basedir) != Framework.Result.OPERATOR: + self.text = "The charm is not based on the operator framework" + return self.Result.NONAPPLICABLE + + entrypoint = get_entrypoint_from_dispatch(basedir) + if entrypoint is None: + self.text = ( + "Cannot find a proper 'dispatch' script pointing to an entrypoint." + ) + return self.Result.NONAPPLICABLE + + if not entrypoint.exists(): + self.text = f"Cannot find the entrypoint file: {str(entrypoint)!r}" + return self.Result.NONAPPLICABLE + + if not self._check_main_calls(entrypoint.read_text()): + self.text = f"The ops.main() call missing from {str(entrypoint)!r}." + return self.Result.ERROR + + return self.Result.OK + + def _check_main_calls(self, code: str): + tree = ast.parse(code) + imports = self._ops_main_imports(tree) + return self._detect_main_calls(tree, imports=imports) + + def _ops_main_imports(self, tree: ast.AST) -> dict[str, str]: + """Analyze imports and return a mapping {local_name: imported thing}.""" + rv = {} + + class ImportVisitor(ast.NodeVisitor): + def visit_Import(self, node: ast.Import): # noqa: N802 + for alias in node.names: + # Detect statements like `import ops` + if alias.name == "ops": + rv[alias.asname or alias.name] = "ops" + if alias.name == "ops.main" and alias.asname: + rv[alias.asname] = "ops.main" + elif alias.name.startswith("ops.") and not alias.asname: + rv["ops"] = "ops" + + def visit_ImportFrom(self, node: ast.ImportFrom): # noqa: N802 + for alias in node.names: + # Detect statements like `from ops import main [as ops_main]` + if node.module in ("ops", "ops.main") and alias.name == "main": + rv[alias.asname or alias.name] = f"{node.module}.main" + + ImportVisitor().visit(tree) + return rv + + def _detect_main_calls(self, tree: ast.AST, *, imports: dict[str, str]) -> bool: + main_call_sites = [] + + class OpsMainFinder(ast.NodeVisitor): + def visit_Call(self, node: ast.Call): # noqa: N802 + match node.func: + # Matches statements like `ops.main.main(...)` + case ast.Attribute( + value=ast.Attribute(value=ast.Name(id=first), attr=second), + attr=third, + ): + call_site = f"{first}.{second}.{third}(...)" + # Matches statements like `ops.main(...)` + case ast.Attribute(value=ast.Name(id=first), attr=second): + call_site = f"{first}.{second}(...)" + # Matches statements like `main(...)` + case ast.Name(id=first): + call_site = f"{first}(...)" + case _: + call_site = "_dummy()" + + match = re.match(r"^([a-zA-Z_][a-zA-Z0-9_]*)(.*)", call_site) + if not match: + raise ValueError("impossible") + alias, rest = match.groups() + resolved = f"{imports.get(alias, '_dummy')}{rest}" + + if resolved in ("ops.main(...)", "ops.main.main(...)"): + main_call_sites.append(call_site) + + self.generic_visit(node) + + OpsMainFinder().visit(tree) + return any(main_call_sites) + + class AdditionalFiles(Linter): """Check that the charm does not contain any additional files in the prime directory. 
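
To make the accepted spellings concrete: the import visitor above maps local aliases back to "ops.main" or "ops.main.main" before call sites are matched. The snippet below is a rough sketch, not part of the change set, that exercises the internal _check_main_calls() helper shown above; MyCharm is a hypothetical placeholder class name, which is fine because the linter only parses the source, it never executes it.

from charmcraft.linters import OpsMainCall

linter = OpsMainCall()

accepted = [
    "import ops\nops.main(MyCharm)",
    "import ops.main\nops.main.main(MyCharm)",
    "from ops import main\nmain(MyCharm)",
    "from ops.main import main as ops_main\nops_main(MyCharm)",
]
# Each of these resolves to "ops.main(...)" or "ops.main.main(...)" and passes.
assert all(linter._check_main_calls(code) for code in accepted)

# A main() that does not come from ops is not recognised; run() would report
# Result.ERROR with "The ops.main() call missing from ..." for such a charm.
assert not linter._check_main_calls("from somewhere import main\nmain(MyCharm)")
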
@@ -542,7 +653,9 @@ class AdditionalFiles(Linter): ) } - def _check_additional_files(self, stage_dir: pathlib.Path, prime_dir: pathlib.Path) -> str: + def _check_additional_files( + self, stage_dir: pathlib.Path, prime_dir: pathlib.Path + ) -> str: """Compare the staged files with the prime files.""" errors: list[str] = [] stage_dir = stage_dir.absolute() @@ -558,7 +671,9 @@ def _check_additional_files(self, stage_dir: pathlib.Path, prime_dir: pathlib.Pa errors.append(f"File '{prime_file}' is not staged but in the charm.") if errors: - self.text = "Error: Additional files found in the charm:\n" + "\n".join(errors) + self.text = "Error: Additional files found in the charm:\n" + "\n".join( + errors + ) return self.Result.ERROR return self.Result.OK @@ -568,12 +683,71 @@ def run(self, basedir: pathlib.Path) -> str: stage_dir = basedir.parent / "stage" if not stage_dir.exists() or not stage_dir.is_dir(): # Does not work without the build environment - self.text = "Additional files check not applicable without a build environment." + self.text = ( + "Additional files check not applicable without a build environment." + ) return self.Result.NONAPPLICABLE return self._check_additional_files(stage_dir, basedir) +class PipCheck(Linter): + """Check that the pip virtual environment is valid.""" + + name = "pip-check" + text = "Virtual environment is valid." + url = "https://pip.pypa.io/en/stable/cli/pip_check/" + + def run(self, basedir: pathlib.Path) -> str: + """Run pip check.""" + venv_dir = basedir / "venv" + if not venv_dir.is_dir(): + self.text = "Charm does not contain a Python venv." + return self.Result.NONAPPLICABLE + if not (venv_dir / "lib").is_dir(): + self.text = "Python venv is not valid." + return self.Result.NONAPPLICABLE + if sys.platform == "win32": + self.text = "Linter does not work on Windows." + return self.Result.NONAPPLICABLE + python_exe = venv_dir / "bin" / "python" + delete_parent = False + if not python_exe.parent.exists(): + delete_parent = True + python_exe.parent.mkdir() + if not python_exe.exists(): + delete_python_exe = True + python_exe.symlink_to(sys.executable) + else: + delete_python_exe = False + + pip_cmd = [sys.executable, "-m", "pip", "--python", str(python_exe), "check"] + try: + check = subprocess.run( + pip_cmd, + text=True, + capture_output=True, + check=False, + ) + if check.returncode == os.EX_OK: + result = self.Result.OK + else: + self.text = check.stdout + result = self.Result.WARNING + except (FileNotFoundError, PermissionError) as e: + self.text = ( + f"{e.strerror}: Could not run Python executable at {sys.executable}." 
+ ) + result = self.Result.NONAPPLICABLE + finally: + if delete_python_exe: + python_exe.unlink() + if delete_parent: + python_exe.parent.rmdir() + + return result + + # all checkers to run; the order here is important, as some checkers depend on the # results from others CHECKERS: list[type[BaseChecker]] = [ @@ -584,5 +758,7 @@ def run(self, basedir: pathlib.Path) -> str: NamingConventions, Framework, Entrypoint, + OpsMainCall, AdditionalFiles, + PipCheck, ] diff --git a/charmcraft/models/basic.py b/charmcraft/models/basic.py index bb49e23a1..4cef986e2 100644 --- a/charmcraft/models/basic.py +++ b/charmcraft/models/basic.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Charmcraft basic pydantic model.""" + from typing import Annotated import craft_parts.constraints diff --git a/charmcraft/models/charmcraft.py b/charmcraft/models/charmcraft.py index 991581fec..ade1309a8 100644 --- a/charmcraft/models/charmcraft.py +++ b/charmcraft/models/charmcraft.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Charmcraft configuration pydantic model.""" + from typing import TypedDict, cast import pydantic @@ -45,8 +46,12 @@ class Charmhub(CraftBaseModel): """Definition of Charmhub endpoint configuration.""" api_url: pydantic.HttpUrl = cast(pydantic.HttpUrl, "https://api.charmhub.io") - storage_url: pydantic.HttpUrl = cast(pydantic.HttpUrl, "https://storage.snapcraftcontent.com") - registry_url: pydantic.HttpUrl = cast(pydantic.HttpUrl, "https://registry.jujucharms.com") + storage_url: pydantic.HttpUrl = cast( + pydantic.HttpUrl, "https://storage.snapcraftcontent.com" + ) + registry_url: pydantic.HttpUrl = cast( + pydantic.HttpUrl, "https://registry.jujucharms.com" + ) class Base(CraftBaseModel): diff --git a/charmcraft/models/config.py b/charmcraft/models/config.py index 75b3664dc..b82ed3caa 100644 --- a/charmcraft/models/config.py +++ b/charmcraft/models/config.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Charmcraft Juju Config pydantic model.""" + from typing import Annotated, Literal import pydantic @@ -65,12 +66,17 @@ class JujuSecretOption(_BaseJujuOption): # the deployment in a model) is at the time that they are # writing the config, but included for completeness. 
default: ( - Annotated[str, pydantic.StringConstraints(pattern=r"^secret:[a-z0-9]{20}$")] | None + Annotated[str, pydantic.StringConstraints(pattern=r"^secret:[a-z0-9]{20}$")] + | None ) = None JujuOption = Annotated[ - JujuStringOption | JujuIntOption | JujuFloatOption | JujuBooleanOption | JujuSecretOption, + JujuStringOption + | JujuIntOption + | JujuFloatOption + | JujuBooleanOption + | JujuSecretOption, pydantic.Field(discriminator="type"), ] diff --git a/charmcraft/models/lint.py b/charmcraft/models/lint.py index 8352ad359..5b53399ae 100644 --- a/charmcraft/models/lint.py +++ b/charmcraft/models/lint.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Models for linters.""" + import enum from typing import final diff --git a/charmcraft/models/manifest.py b/charmcraft/models/manifest.py index 894e2d649..7fad49e34 100644 --- a/charmcraft/models/manifest.py +++ b/charmcraft/models/manifest.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Model for output charm's manifest.yaml file.""" + from typing import Any, Literal from craft_application import models diff --git a/charmcraft/models/project.py b/charmcraft/models/project.py index 1ff811271..171764166 100644 --- a/charmcraft/models/project.py +++ b/charmcraft/models/project.py @@ -20,6 +20,7 @@ import pathlib import re import textwrap +import warnings from collections.abc import Iterable, Iterator from typing import ( Annotated, @@ -28,11 +29,13 @@ cast, ) +import craft_platforms import pydantic import pydantic.v1 from craft_application import errors, models, util from craft_application.util import safe_yaml_load -from craft_cli import CraftError +from craft_cli import CraftError, emit +from craft_platforms import charm from craft_providers import bases from pydantic import dataclasses from typing_extensions import Self @@ -118,7 +121,9 @@ def _validate_api_version(cls, value: str) -> str: try: int(api) except ValueError: - raise ValueError(f"API version not valid. Expected an integer, got {api!r}") from None + raise ValueError( + f"API version not valid. Expected an integer, got {api!r}" + ) from None return str(value) @pydantic.field_validator("version", mode="before") @@ -332,7 +337,9 @@ def get_build_plan(self) -> list[models.BuildInfo]: platform=current_arch, build_on=current_arch, build_for=current_arch, - base=bases.BaseName(name=current_base.system, version=current_base.release), + base=bases.BaseName( + name=current_base.system, version=current_base.release + ), ) ] if not self.base: @@ -344,38 +351,28 @@ def get_build_plan(self) -> list[models.BuildInfo]: if self.platforms is None: raise CraftError("Must define at least one platform.") - build_infos = [] - for platform_name, platform in self.platforms.items(): - if platform is None: - if platform_name not in const.SUPPORTED_ARCHITECTURES: - raise CraftError( - f"Invalid platform {platform_name}.", - details="A platform name must either be a valid architecture name or the " - "platform must specify one or more build-on and build-for architectures.", - ) - build_infos.append( - models.BuildInfo( - platform_name, - build_on=platform_name, - build_for=platform_name, - base=base, - ) - ) - else: - # TODO: this should go to craft-platforms, so silence mypy for now. 
- for build_on in platform.build_on: # type: ignore[union-attr] - build_infos.extend( - [ - models.BuildInfo( - platform_name, - build_on=str(build_on), - build_for=str(build_for), - base=base, - ) - for build_for in platform.build_for # type: ignore[union-attr] - ] - ) - return build_infos + platforms = cast( + # https://github.com/canonical/craft-platforms/issues/43 + craft_platforms.Platforms, # pyright: ignore[reportPrivateImportUsage] + { + name: (platform.marshal() if platform else None) + for name, platform in self.platforms.items() + }, + ) + build_infos = charm.get_platforms_charm_build_plan( + base=self.base, + build_base=self.build_base, + platforms=platforms, + ) + return [ + models.BuildInfo( + platform=info.platform, + build_on=str(info.build_on), + build_for=str(info.build_for), + base=base, + ) + for info in build_infos + ] class CharmcraftProject(models.Project, metaclass=abc.ABCMeta): @@ -404,7 +401,13 @@ class CharmcraftProject(models.Project, metaclass=abc.ABCMeta): ), ) charmhub: Charmhub | None = pydantic.Field( - default=None, description="(DEPRECATED): Configuration for accessing charmhub." + default=None, + description="(DEPRECATED): Configuration for accessing charmhub.", + deprecated=( + "The 'charmhub' field is deprecated and no longer used. It will be removed in a " + f"future release. Use the ${const.STORE_API_ENV_VAR}, ${const.STORE_STORAGE_ENV_VAR} " + f"and ${const.STORE_REGISTRY_ENV_VAR} environment variables instead." + ), ) parts: dict[str, dict[str, Any]] = pydantic.Field(default_factory=dict) @@ -508,7 +511,9 @@ def _preprocess_parts( ) -> dict[str, dict[str, Any]]: """Preprocess parts object for a charm or bundle, creating an implicit part if needed.""" if parts is not None and not isinstance(parts, dict): - raise TypeError("'parts' in charmcraft.yaml must conform to the charmcraft.yaml spec.") + raise TypeError( + "'parts' in charmcraft.yaml must conform to the charmcraft.yaml spec." 
+ ) if not parts: if info.config and info.config.get("title") == "Bundle": parts = {"bundle": {"plugin": "bundle"}} @@ -531,6 +536,23 @@ def _preprocess_parts( part.setdefault("source", ".") return {name: process_part_config(part) for name, part in parts.items()} + @pydantic.model_validator(mode="after") + def _warn_charmhub_deprecated(self) -> Self: + repeat = False + with warnings.catch_warnings(record=True) as caught: + if self.charmhub: + repeat = True + for warning in caught: + if isinstance(warning.message, Warning): + message = warning.message.args[0] + else: + message = warning.message + emit.progress(f"WARNING: {message}", permanent=True) + if repeat: + for warning in caught: + warnings.warn(warning.message, stacklevel=1) + return self + class CharmProject(CharmcraftProject): """A base class for all charm types.""" @@ -985,14 +1007,19 @@ def _check_base_is_legacy(base: charmcraft.BaseDict) -> bool: and base["channel"] < "24.04" # pyright: ignore[reportTypedDictNotRequiredAccess] ): return True - return base in ({"name": "centos", "channel": "7"}, {"name": "almalinux", "channel": "9"}) + return base in ( + {"name": "centos", "channel": "7"}, + {"name": "almalinux", "channel": "9"}, + ) def _validate_base( base: charmcraft.BaseDict | charmcraft.LongFormBasesDict, ) -> charmcraft.LongFormBasesDict: if "name" in base: # Convert short form to long form - base = cast(charmcraft.LongFormBasesDict, {"build-on": [base], "run-on": [base]}) + base = cast( + charmcraft.LongFormBasesDict, {"build-on": [base], "run-on": [base]} + ) else: # Cast to long form since we know it is one. base = cast(charmcraft.LongFormBasesDict, base) @@ -1022,9 +1049,9 @@ class BasesCharm(CharmProject): # This is defined this way because using conlist makes mypy sad and using # a ConstrainedList child class has pydantic issues. This appears to be # solved with Pydantic 2. - bases: list[Annotated[BasesConfiguration, pydantic.BeforeValidator(_validate_base)]] = ( - pydantic.Field(min_length=1) - ) + bases: list[ + Annotated[BasesConfiguration, pydantic.BeforeValidator(_validate_base)] + ] = pydantic.Field(min_length=1) base: None = None diff --git a/charmcraft/parts/__init__.py b/charmcraft/parts/__init__.py index eec52a538..4d496aaad 100644 --- a/charmcraft/parts/__init__.py +++ b/charmcraft/parts/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2021-2023 Canonical Ltd. +# Copyright 2021-2024 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,28 +18,35 @@ from typing import Any -from craft_parts import plugins +import craft_parts from craft_parts.parts import PartSpec -from charmcraft.parts.bundle import BundlePlugin -from charmcraft.parts.charm import CharmPlugin, CharmPluginProperties +from . 
import plugins from charmcraft.parts.lifecycle import PartsLifecycle -from charmcraft.parts.reactive import ReactivePlugin, ReactivePluginProperties __all__ = [ - "CharmPlugin", - "CharmPluginProperties", - "ReactivePlugin", - "ReactivePluginProperties", + "plugins", + "get_app_plugins", "setup_parts", "process_part_config", "PartsLifecycle", ] -def setup_parts(): +def get_app_plugins() -> dict[str, type[craft_parts.plugins.Plugin]]: + """Get the app-specific plugins for Charmcraft.""" + return { + "bundle": plugins.BundlePlugin, + "charm": plugins.CharmPlugin, + "poetry": plugins.PoetryPlugin, + "python": plugins.PythonPlugin, + "reactive": plugins.ReactivePlugin, + } + + +def setup_parts() -> None: """Initialize craft-parts plugins.""" - plugins.register({"charm": CharmPlugin, "bundle": BundlePlugin, "reactive": ReactivePlugin}) + craft_parts.plugins.register(get_app_plugins()) def process_part_config(data: dict[str, Any]) -> dict[str, Any]: @@ -59,18 +66,20 @@ def process_part_config(data: dict[str, Any]) -> dict[str, Any]: if not plugin_name: raise ValueError("'plugin' not defined") - plugin_class = plugins.get_plugin_class(plugin_name) + plugin_class = craft_parts.plugins.get_plugin_class(plugin_name) # validate plugin properties plugin_properties = plugin_class.properties_class.unmarshal(spec) # validate common part properties - part_spec = plugins.extract_part_properties(spec, plugin_name=plugin_name) + part_spec = craft_parts.plugins.extract_part_properties( + spec, plugin_name=plugin_name + ) PartSpec(**part_spec) # get plugin properties data if it's model based (otherwise it's empty), and # update with the received config - if isinstance(plugin_properties, plugins.PluginProperties): + if isinstance(plugin_properties, craft_parts.plugins.PluginProperties): full_config = plugin_properties.model_dump(by_alias=True, exclude_unset=True) else: full_config = {} diff --git a/charmcraft/parts/lifecycle.py b/charmcraft/parts/lifecycle.py index a12c5aadd..2cff33b32 100644 --- a/charmcraft/parts/lifecycle.py +++ b/charmcraft/parts/lifecycle.py @@ -17,6 +17,7 @@ PENDING DEPRECATION: we're moving this to a craft-application LifecycleService """ + import os import pathlib import shlex @@ -88,7 +89,9 @@ def run(self, target_step: Step) -> None: charm_part = self._all_parts["charm"] if charm_part.get("plugin") == "charm": entrypoint = os.path.normpath(charm_part["charm-entrypoint"]) - dis_entrypoint = os.path.normpath(_get_dispatch_entrypoint(self.prime_dir)) + dis_entrypoint = os.path.normpath( + _get_dispatch_entrypoint(self.prime_dir) + ) if entrypoint != dis_entrypoint: self._lcm.clean(Step.BUILD, part_names=["charm"]) self._lcm.reload_state() @@ -100,8 +103,14 @@ def run(self, target_step: Step) -> None: with self._lcm.action_executor() as aex: executor_timer.mark("Context enter") for act in actions: - emit.progress(f"Running step {act.step.name} for part {act.part_name!r}") - with instrum.Timer("Running step", step=act.step.name, part=act.part_name): # type: ignore[arg-type] + emit.progress( + f"Running step {act.step.name} for part {act.part_name!r}" + ) + with instrum.Timer( + "Running step", + step=act.step.name, # type: ignore[arg-type] + part=act.part_name, # type: ignore[arg-type] + ): with emit.open_stream("Execute action") as stream: aex.execute([act], stdout=stream, stderr=stream) executor_timer.mark("Context exit") diff --git a/charmcraft/parts/plugins/__init__.py b/charmcraft/parts/plugins/__init__.py new file mode 100644 index 000000000..cb6d93045 --- /dev/null +++ 
b/charmcraft/parts/plugins/__init__.py @@ -0,0 +1,36 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft + +"""Craft-parts plugins and plugin overrides for charmcraft.""" + +from ._bundle import BundlePlugin, BundlePluginProperties +from ._charm import CharmPlugin, CharmPluginProperties +from ._poetry import PoetryPlugin, PoetryPluginProperties +from ._python import PythonPlugin, PythonPluginProperties +from ._reactive import ReactivePlugin, ReactivePluginProperties + +__all__ = [ + "BundlePlugin", + "BundlePluginProperties", + "CharmPlugin", + "CharmPluginProperties", + "PoetryPlugin", + "PoetryPluginProperties", + "PythonPlugin", + "PythonPluginProperties", + "ReactivePlugin", + "ReactivePluginProperties", +] diff --git a/charmcraft/parts/bundle.py b/charmcraft/parts/plugins/_bundle.py similarity index 99% rename from charmcraft/parts/bundle.py rename to charmcraft/parts/plugins/_bundle.py index 057131997..bff693cff 100644 --- a/charmcraft/parts/bundle.py +++ b/charmcraft/parts/plugins/_bundle.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Bundle plugin for craft-parts.""" + import sys from typing import Literal diff --git a/charmcraft/parts/charm.py b/charmcraft/parts/plugins/_charm.py similarity index 93% rename from charmcraft/parts/charm.py rename to charmcraft/parts/plugins/_charm.py index fcfa4b249..a93995b5e 100644 --- a/charmcraft/parts/charm.py +++ b/charmcraft/parts/plugins/_charm.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Charm plugin for craft-parts.""" + import os import pathlib import re @@ -55,7 +56,9 @@ class CharmPluginProperties(plugins.PluginProperties, frozen=True): """ @pydantic.field_validator("charm_entrypoint", mode="after") - def _validate_entrypoint(cls, charm_entrypoint: str, info: pydantic.ValidationInfo) -> str: + def _validate_entrypoint( + cls, charm_entrypoint: str, info: pydantic.ValidationInfo + ) -> str: """Validate the entry point.""" # the location of the project is needed if "source" not in info.data: @@ -67,11 +70,15 @@ def _validate_entrypoint(cls, charm_entrypoint: str, info: pydantic.ValidationIn # check that the entrypoint is inside the project filepath = (project_dirpath / charm_entrypoint).resolve() if project_dirpath not in filepath.parents: - raise ValueError(f"charm entry point must be inside the project: {str(filepath)!r}") + raise ValueError( + f"charm entry point must be inside the project: {str(filepath)!r}" + ) # store the entrypoint always relative to the project's path (no matter if the origin # was relative or absolute) - rel_entrypoint = (project_dirpath / charm_entrypoint).relative_to(project_dirpath) + rel_entrypoint = (project_dirpath / charm_entrypoint).relative_to( + project_dirpath + ) return rel_entrypoint.as_posix() @pydantic.model_validator(mode="after") @@ -90,7 +97,10 @@ def _validate_requirements(self) -> Self: # if nothing 
indicated, and default file is there, use it default_reqs_name = "requirements.txt" - if not self.charm_requirements and (project_dirpath / default_reqs_name).is_file(): + if ( + not self.charm_requirements + and (project_dirpath / default_reqs_name).is_file() + ): self.charm_requirements.append(default_reqs_name) return self @@ -188,7 +198,10 @@ def get_build_packages(self) -> set[str]: elif platform.is_yum_based(): try: os_release = os_utils.OsRelease() - if (os_release.id(), os_release.version_id()) in (("centos", "7"), ("rhel", "7")): + if (os_release.id(), os_release.version_id()) in ( + ("centos", "7"), + ("rhel", "7"), + ): # CentOS 7 Python 3.8 from SCL repo return { "autoconf", @@ -305,7 +318,10 @@ def _get_strict_dependencies_parameters(self) -> list[str]: options = cast(CharmPluginProperties, self._options) return [ "--strict-dependencies", - *(f"--binary-package={package}" for package in options.charm_binary_python_packages), + *( + f"--binary-package={package}" + for package in options.charm_binary_python_packages + ), *(f"--requirement={reqs}" for reqs in options.charm_requirements), ] @@ -324,7 +340,10 @@ def _get_legacy_dependencies_parameters(self) -> list[str]: base_tools.remove(pkg) os_release = os_utils.OsRelease() - if (os_release.id(), os_release.version_id()) in (("centos", "7"), ("rhel", "7")): + if (os_release.id(), os_release.version_id()) in ( + ("centos", "7"), + ("rhel", "7"), + ): # CentOS 7 compatibility, bootstrap base tools use binary packages for pkg in base_tools: parameters.extend(["-b", pkg]) @@ -354,7 +373,10 @@ def _get_os_special_priority_paths(self) -> str | None: """Return a str of PATH for special OS.""" with suppress(OsReleaseIdError, OsReleaseVersionIdError): os_release = os_utils.OsRelease() - if (os_release.id(), os_release.version_id()) in (("centos", "7"), ("rhel", "7")): + if (os_release.id(), os_release.version_id()) in ( + ("centos", "7"), + ("rhel", "7"), + ): # CentOS 7 Python 3.8 from SCL repo return "/opt/rh/rh-python38/root/usr/bin" diff --git a/charmcraft/parts/plugins/_poetry.py b/charmcraft/parts/plugins/_poetry.py new file mode 100644 index 000000000..5af1d1f2a --- /dev/null +++ b/charmcraft/parts/plugins/_poetry.py @@ -0,0 +1,99 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Charmcraft-specific poetry plugin.""" + +import pathlib +from pathlib import Path + +from craft_parts.plugins import poetry_plugin +from overrides import override + +from charmcraft import utils + + +class PoetryPluginProperties(poetry_plugin.PoetryPluginProperties, frozen=True): + poetry_keep_bins: bool = False + """Keep the virtual environment's 'bin' directory.""" + + +class PoetryPlugin(poetry_plugin.PoetryPlugin): + """Charmcraft-specific version of the poetry plugin.""" + + properties_class = PoetryPluginProperties + _options: PoetryPluginProperties # type: ignore[reportIncompatibleVariableOverride] + + def get_build_environment(self) -> dict[str, str]: + return utils.extend_python_build_environment(super().get_build_environment()) + + def _get_venv_directory(self) -> Path: + return self._part_info.part_install_dir / "venv" + + def _get_pip(self) -> str: + """Get the pip command to use.""" + return f"{self._get_system_python_interpreter()} -m pip --python=${{PARTS_PYTHON_VENV_INTERP_PATH}}" + + def _get_pip_install_commands(self, requirements_path: pathlib.Path) -> list[str]: + """Get the commands for installing with pip. + + This only installs the dependencies from requirements, unlike the upstream + version, because charms are not installable Python packages. + + :param requirements_path: The path of the requirements.txt file to write to. + :returns: A list of strings forming the install script. + """ + pip = self._get_pip() + return [ + # These steps need to be separate because poetry export defaults to including + # hashes, which don't work with installing from a directory. + f"{pip} install --no-deps '--requirement={requirements_path}'", + # Check that the virtualenv is consistent. + f"{pip} check", + ] + + def _get_package_install_commands(self) -> list[str]: + """Get the package installation commands. + + This overrides the generic class to also: + + 1. Copy the charm source into the charm. + 2. Copy the charmlibs into the charm. + """ + return [ + *super()._get_package_install_commands(), + *utils.get_charm_copy_commands( + self._part_info.part_build_dir, self._part_info.part_install_dir + ), + ] + + def _should_remove_symlinks(self) -> bool: + return True + + def _get_rewrite_shebangs_commands(self) -> list[str]: + """Get the commands used to rewrite shebangs in the install dir. + + Charms don't need the shebangs to be rewritten. + """ + return [] + + @override + def get_build_commands(self) -> list[str]: + """Get the build commands for the Python plugin.""" + return [ + *super().get_build_commands(), + *utils.get_venv_cleanup_commands( + self._get_venv_directory(), keep_bins=self._options.poetry_keep_bins + ), + ] diff --git a/charmcraft/parts/plugins/_python.py b/charmcraft/parts/plugins/_python.py new file mode 100644 index 000000000..999250085 --- /dev/null +++ b/charmcraft/parts/plugins/_python.py @@ -0,0 +1,104 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Charmcraft-specific poetry plugin.""" + +import shlex +from pathlib import Path + +from craft_parts.plugins import python_plugin +from overrides import override + +from charmcraft import utils + + +class PythonPluginProperties(python_plugin.PythonPluginProperties, frozen=True): + python_packages: list[str] = [] # No default packages. + python_keep_bins: bool = False + """Keep the virtual environment's 'bin' directory.""" + + +class PythonPlugin(python_plugin.PythonPlugin): + """Charmcraft-specific version of the python plugin.""" + + properties_class = PythonPluginProperties + _options: PythonPluginProperties # type: ignore[reportIncompatibleVariableOverride] + + @override + def get_build_environment(self) -> dict[str, str]: + return utils.extend_python_build_environment(super().get_build_environment()) + + @override + def _get_venv_directory(self) -> Path: + return self._part_info.part_install_dir / "venv" + + @override + def _get_pip(self) -> str: + """Get the pip command to use.""" + return f"{self._get_system_python_interpreter()} -m pip --python=${{PARTS_PYTHON_VENV_INTERP_PATH}}" + + @override + def _get_package_install_commands(self) -> list[str]: + """Get the package installation commands. + + This overrides the generic class in the following ways: + + 1. Doesn't try to install '.' (charms are not installable packages) + 2. Copy the charm source into the charm. + 3. Copy the charmlibs into the charm. + """ + pip = self._get_pip() + install_params = shlex.join( + ( + *( + f"--constraint={constraint}" + for constraint in self._options.python_constraints + ), + *( + f"--requirement={requirement}" + for requirement in self._options.python_requirements + ), + *self._options.python_packages, + ) + ) + return [ + f"{pip} install --no-deps {install_params}", + f"{pip} check", + *utils.get_charm_copy_commands( + self._part_info.part_build_dir, self._part_info.part_install_dir + ), + ] + + @override + def _should_remove_symlinks(self) -> bool: + return True + + @override + def _get_rewrite_shebangs_commands(self) -> list[str]: + """Get the commands used to rewrite shebangs in the install dir. + + Charms don't need the shebangs to be rewritten. 
+ """ + return [] + + @override + def get_build_commands(self) -> list[str]: + """Get the build commands for the Python plugin.""" + return [ + *super().get_build_commands(), + *utils.get_venv_cleanup_commands( + self._get_venv_directory(), keep_bins=self._options.python_keep_bins + ), + ] diff --git a/charmcraft/parts/reactive.py b/charmcraft/parts/plugins/_reactive.py similarity index 97% rename from charmcraft/parts/reactive.py rename to charmcraft/parts/plugins/_reactive.py index 275979740..8bca89bcd 100644 --- a/charmcraft/parts/reactive.py +++ b/charmcraft/parts/plugins/_reactive.py @@ -147,7 +147,9 @@ def run_charm_tool(args: list[str]): result_classification = "ERROR" raise result_classification = "WARNING" - print(f"charm tool execution {result_classification}: returncode={exc.returncode}") + print( + f"charm tool execution {result_classification}: returncode={exc.returncode}" + ) else: print( f"charm tool execution {result_classification}: returncode={completed_process.returncode}" @@ -155,7 +157,11 @@ def run_charm_tool(args: list[str]): def build( - *, charm_name: str, build_dir: Path, install_dir: Path, charm_build_arguments: list[str] + *, + charm_name: str, + build_dir: Path, + install_dir: Path, + charm_build_arguments: list[str], ): """Build a charm using charm tool. diff --git a/charmcraft/preprocess.py b/charmcraft/preprocess.py index f114a87e3..c20a931a8 100644 --- a/charmcraft/preprocess.py +++ b/charmcraft/preprocess.py @@ -18,6 +18,7 @@ These functions are called from the Application class's `_extra_yaml_transform` to do pre-processing on a charmcraft.yaml file before applying extensions. """ + import pathlib from typing import Any diff --git a/charmcraft/services/__init__.py b/charmcraft/services/__init__.py index 6f00f865d..c4ccb43f1 100644 --- a/charmcraft/services/__init__.py +++ b/charmcraft/services/__init__.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Service classes charmcraft.""" + from __future__ import annotations import dataclasses @@ -23,6 +24,7 @@ from craft_application import ServiceFactory from .analysis import AnalysisService +from .charmlibs import CharmLibsService from .image import ImageService from .lifecycle import LifecycleService from .package import PackageService @@ -39,6 +41,7 @@ class CharmcraftServiceFactory(ServiceFactory): LifecycleClass: type[LifecycleService] = LifecycleService ProviderClass: type[ProviderService] = ProviderService AnalysisClass: type[AnalysisService] = AnalysisService + CharmLibsClass: type[CharmLibsService] = CharmLibsService StoreClass: type[StoreService] = StoreService RemoteBuildClass: type[RemoteBuildService] = RemoteBuildService ImageClass: type[ImageService] = ImageService @@ -47,6 +50,7 @@ class CharmcraftServiceFactory(ServiceFactory): # Cheeky hack that lets static type checkers report the correct types. # Any apps that add their own services should do this too. 
analysis: AnalysisService = None # type: ignore[assignment] + charm_libs: CharmLibsService = None # type: ignore[assignment] image: ImageService = None # type: ignore[assignment] lifecycle: LifecycleService = None # type: ignore[assignment] package: PackageService = None # type: ignore[assignment] diff --git a/charmcraft/services/analysis.py b/charmcraft/services/analysis.py index 04911fefa..6565091cb 100644 --- a/charmcraft/services/analysis.py +++ b/charmcraft/services/analysis.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Service class for packing.""" + from __future__ import annotations import pathlib @@ -34,12 +35,18 @@ class AnalysisService(craft_application.AppService): _project: models.CharmcraftProject # type: ignore[assignment] def __init__( # (too many arguments) - self, app: craft_application.AppMetadata, services: craft_application.ServiceFactory + self, + app: craft_application.AppMetadata, + services: craft_application.ServiceFactory, ) -> None: super().__init__(app, services) def lint_directory( - self, path: pathlib.Path, *, ignore: Container[str] = (), include_ignored: bool = True + self, + path: pathlib.Path, + *, + ignore: Container[str] = (), + include_ignored: bool = True, ) -> Iterator[CheckResult]: """Lint an unpacked charm in the given directory.""" for checker, run in self._gen_checkers(ignore=ignore): @@ -49,7 +56,11 @@ def lint_directory( yield checker.get_ignore_result() def lint_file( - self, path: pathlib.Path, *, ignore: Container[str] = (), include_ignored: bool = True + self, + path: pathlib.Path, + *, + ignore: Container[str] = (), + include_ignored: bool = True, ) -> Iterator[CheckResult]: """Lint a packed charm. @@ -61,7 +72,9 @@ def lint_file( """ path = path.resolve(strict=True) - with tempfile.TemporaryDirectory(prefix=f"charmcraft_{path.name}_") as directory: + with tempfile.TemporaryDirectory( + prefix=f"charmcraft_{path.name}_" + ) as directory: directory_path = pathlib.Path(directory) try: with zipfile.ZipFile(path) as zip_file: @@ -85,7 +98,9 @@ def lint_file( ) @staticmethod - def _gen_checkers(ignore: Container[str]) -> Iterator[tuple[linters.BaseChecker, bool]]: + def _gen_checkers( + ignore: Container[str], + ) -> Iterator[tuple[linters.BaseChecker, bool]]: """Generate the checker classes to run, in their correct order.""" for cls in linters.CHECKERS: run_linter = cls.name not in ignore diff --git a/charmcraft/services/charmlibs.py b/charmcraft/services/charmlibs.py new file mode 100644 index 000000000..74d71156d --- /dev/null +++ b/charmcraft/services/charmlibs.py @@ -0,0 +1,98 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Service class for interacting with charm libraries.""" + +from __future__ import annotations + +import pathlib + +import craft_application + +from charmcraft import models, utils +from charmcraft.store.models import Library + + +class CharmLibsService(craft_application.ProjectService): + """Business logic for creating packages.""" + + _project: models.CharmcraftProject # type: ignore[assignment] + + def __init__( + self, + app: craft_application.AppMetadata, + services: craft_application.ServiceFactory, + *, + project: models.CharmcraftProject, + project_dir: pathlib.Path, + ) -> None: + super().__init__(app, services, project=project) + self._project_dir = project_dir + + def is_downloaded( + self, *, charm_name: str, lib_name: str, api: int, patch: int | None = None + ) -> bool: + """Check if the given charm lib is already downloaded on disk. + + :param charm_name: The name of the charm the lib is attached to. + :param lib_name: The name of the lib itself. + :param api: The api version of the lib + :param patch: If given, the specific patch version of the lib. + """ + lib_path = utils.get_lib_path(charm_name, lib_name, api) + if not (self._project_dir / lib_path).exists(): + return False + + if patch is None: + return True + + lib_info = utils.get_lib_info(lib_path=self._project_dir / lib_path) + return lib_info.patch == patch + + def get_local_version( + self, *, charm_name: str, lib_name: str + ) -> tuple[int, int] | None: + """Get the version of the library on the machine, or None. + + :param charm_name: The name of the charm where the lib is published + :param lib_name: The name of the library itself + :returns: Either the version of the library as a pair of integers or None + if the library cannot be found. + """ + charm_libs_path = self._project_dir / utils.get_lib_charm_path(charm_name) + if not charm_libs_path.is_dir(): + return None + for api_version_path in charm_libs_path.iterdir(): + lib_path = api_version_path / f"{lib_name}.py" + if lib_path.exists() and lib_path.is_file() or lib_path.is_symlink(): + info = utils.get_lib_info(lib_path=lib_path) + if info.patch == -1: + return None + return (info.api, info.patch) + return None + + def write(self, library: Library) -> None: + """Write the given library to disk. + + :param library: A store library object with valid content. + """ + if library.content is None: + # This should be considered an internal error. + raise ValueError("Library has no content.") + lib_path = self._project_dir / utils.get_lib_path( + library.charm_name, library.lib_name, library.api + ) + lib_path.parent.mkdir(parents=True, exist_ok=True) + lib_path.write_text(library.content) diff --git a/charmcraft/services/image.py b/charmcraft/services/image.py index 648e44309..cc2827b07 100644 --- a/charmcraft/services/image.py +++ b/charmcraft/services/image.py @@ -54,14 +54,20 @@ class ImageService(craft_application.AppService): """ _skopeo: utils.Skopeo - _docker: docker.DockerClient + _docker: docker.DockerClient | None @override def setup(self) -> None: """Set up the image service.""" super().setup() self._skopeo = utils.Skopeo(insecure_policy=True) - self._docker = docker.from_env() + try: + self._docker = docker.from_env() + except docker.errors.DockerException: + logger.debug( + "could not create Docker client. Docker may not be installed. Ignoring..." 
+ ) + self._docker = None def copy( self, @@ -94,20 +100,42 @@ def copy( preserve_digests=True, ) - def get_maybe_id_from_docker(self, name: str) -> str | None: + @staticmethod + def get_name_from_url(url: str) -> str: + """Get the name of an image from a Docker URL or its name.""" + if "://" not in url: + return url + # Return only the name, even if something is on ghcr or somewhere. + return url.partition("://")[2] + + def get_maybe_id_from_docker(self, url: str) -> str | None: """Get the ID of an image from Docker. - :param name: Any string Docker recognises as the image name. + :param url: Any string Docker recognises as the image name or a docker:// url :returns: An image digest or None The digest will match the OCI digest spec: https://github.com/opencontainers/image-spec/blob/main/descriptor.md#digests """ + if self._docker is None: + return None + name = self.get_name_from_url(url) try: image = self._docker.images.get(name) except docker.errors.ImageNotFound: - return None - return image.id + logger.debug("Image not found in local Docker") + except docker.errors.APIError as exc: + logger.debug(f"API error when querying local Docker: {exc}", exc_info=exc) + else: + return image.id + return None + + @staticmethod + def convert_go_arch_to_charm_arch(architecture: str) -> const.CharmArch: + """Convert an OCI architecture to a charm architecture.""" + return const.CharmArch( + const.GO_ARCH_TO_CHARM_ARCH.get(architecture, architecture) + ) def inspect(self, image: str) -> OCIMetadata: """Inspect an image with Skopeo and return the relevant metadata. @@ -132,7 +160,13 @@ def inspect(self, image: str) -> OCIMetadata: platform = child.get("platform", {}) if platform.get("os") != "linux": continue - architectures.append(const.CharmArch(platform["architecture"])) + arch = platform["architecture"] + try: + charm_arch = self.convert_go_arch_to_charm_arch(arch) + except ValueError: + logger.debug(f"Ignoring unknown architecture {arch}") + continue + architectures.append(charm_arch) if not architectures: raise errors.CraftError("No architectures found in image for Linux OS.") else: diff --git a/charmcraft/services/lifecycle.py b/charmcraft/services/lifecycle.py index 4989793c1..b9710545b 100644 --- a/charmcraft/services/lifecycle.py +++ b/charmcraft/services/lifecycle.py @@ -14,12 +14,18 @@ # # For further info, check https://github.com/canonical/charmcraft """Service class for running craft lifecycle commands.""" + from __future__ import annotations +from typing import cast + +import craft_parts from craft_application import services, util from craft_cli import emit from overrides import override +from charmcraft import dispatch + class LifecycleService(services.LifecycleService): """Business logic for lifecycle builds.""" @@ -55,3 +61,13 @@ def _get_build_for(self) -> str: return arch return host_arch + + @override + def post_prime(self, step_info: craft_parts.StepInfo) -> bool: + return_value = super().post_prime(step_info) + + project_info = cast(craft_parts.ProjectInfo, step_info.project_info) + # TODO: include an entrypoint override. 
#1896 + return return_value | dispatch.create_dispatch( + prime_dir=project_info.dirs.prime_dir + ) diff --git a/charmcraft/services/package.py b/charmcraft/services/package.py index e06664adc..beb39e814 100644 --- a/charmcraft/services/package.py +++ b/charmcraft/services/package.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Service class for packing.""" + from __future__ import annotations import json @@ -63,7 +64,9 @@ def __init__( project_dir: pathlib.Path, build_plan: list[craft_application.models.BuildInfo], ) -> None: - super().__init__(app, services, project=cast(craft_application.models.Project, project)) + super().__init__( + app, services, project=cast(craft_application.models.Project, project) + ) self.project_dir = project_dir.resolve(strict=True) self._platform = build_plan[0].platform self._build_plan = build_plan @@ -95,7 +98,9 @@ def _write_package_paths(self, packages: Iterable[pathlib.Path]) -> None: with packages_file.open("at") as file: file.writelines(f"{package.name}\n" for package in packages) - def pack_bundle(self, prime_dir: pathlib.Path, dest_dir: pathlib.Path) -> pathlib.Path: + def pack_bundle( + self, prime_dir: pathlib.Path, dest_dir: pathlib.Path + ) -> pathlib.Path: """Pack a prime directory as a bundle.""" name = self._project.name or "bundle" bundle_path = dest_dir / f"{name}.zip" @@ -103,7 +108,9 @@ def pack_bundle(self, prime_dir: pathlib.Path, dest_dir: pathlib.Path) -> pathli utils.build_zip(bundle_path, prime_dir) return bundle_path - def pack_charm(self, prime_dir: pathlib.Path, dest_dir: pathlib.Path) -> pathlib.Path: + def pack_charm( + self, prime_dir: pathlib.Path, dest_dir: pathlib.Path + ) -> pathlib.Path: """Pack a prime directory as a charm for a given set of bases.""" charm_path = self.get_charm_path(dest_dir) emit.progress(f"Packing charm {charm_path.name}") @@ -214,11 +221,15 @@ def get_manifest_bases(self) -> list[models.Base]: if platform.build_for: architectures = [str(arch) for arch in platform.build_for] else: - raise ValueError(f"Platform {self._platform} contains unknown build-for.") + raise ValueError( + f"Platform {self._platform} contains unknown build-for." + ) else: architectures = [util.get_host_architecture()] return [models.Base.from_str_and_arch(self._project.base, architectures)] - raise TypeError(f"Unknown charm type {self._project.__class__}, cannot get bases.") + raise TypeError( + f"Unknown charm type {self._project.__class__}, cannot get bases." + ) def write_metadata(self, path: pathlib.Path) -> None: """Write additional charm metadata. @@ -248,7 +259,10 @@ def write_metadata(self, path: pathlib.Path) -> None: (path / "manifest.yaml").write_text( utils.dump_yaml( manifest.model_dump( - mode="json", by_alias=True, exclude_unset=False, exclude_none=True + mode="json", + by_alias=True, + exclude_unset=False, + exclude_none=True, ) ) ) @@ -256,7 +270,9 @@ def write_metadata(self, path: pathlib.Path) -> None: project_dict = self._project.marshal() # If there is a reactive part, defer to it for the existence of metadata.yaml. 
- plugins = {part.get("plugin") or name for name, part in self._project.parts.items()} + plugins = { + part.get("plugin") or name for name, part in self._project.parts.items() + } is_reactive = "reactive" in plugins stage_dir = self._services.lifecycle.project_info.dirs.stage_dir if is_reactive and (stage_dir / const.METADATA_FILENAME).exists(): @@ -264,9 +280,13 @@ def write_metadata(self, path: pathlib.Path) -> None: f"{const.METADATA_FILENAME!r} generated by charm. Not using original project metadata." ) else: - self._write_file_or_object(self.metadata.marshal(), const.METADATA_FILENAME, path) + self._write_file_or_object( + self.metadata.marshal(), const.METADATA_FILENAME, path + ) if is_reactive and (stage_dir / const.JUJU_ACTIONS_FILENAME).exists(): - emit.debug(f"{const.JUJU_ACTIONS_FILENAME!r} generated by charm. Skipping generation.") + emit.debug( + f"{const.JUJU_ACTIONS_FILENAME!r} generated by charm. Skipping generation." + ) elif actions := cast(dict | None, project_dict.get("actions")): self._write_file_or_object(actions, "actions.yaml", path) if config := cast(dict | None, project_dict.get("config")): diff --git a/charmcraft/services/provider.py b/charmcraft/services/provider.py index cefcb9ad9..704a98f6f 100644 --- a/charmcraft/services/provider.py +++ b/charmcraft/services/provider.py @@ -15,21 +15,58 @@ # For further info, check https://github.com/canonical/charmcraft """Service class for creating providers.""" + from __future__ import annotations +import contextlib +import io +from collections.abc import Generator + +from craft_application.models import BuildInfo + +try: + import fcntl +except ModuleNotFoundError: # Not available on Windows. + fcntl = None # type: ignore[assignment] import os import pathlib +from typing import cast +import craft_application import craft_providers from craft_application import services +from craft_cli import emit from craft_providers import bases -from charmcraft import env +from charmcraft import env, models class ProviderService(services.ProviderService): """Business logic for getting providers.""" + def __init__( + self, + app: craft_application.AppMetadata, + services: craft_application.ServiceFactory, + *, + project: models.CharmcraftProject, + work_dir: pathlib.Path, + build_plan: list[BuildInfo], + provider_name: str | None = None, + install_snap: bool = True, + ) -> None: + super().__init__( + app, + services, + project=project, + work_dir=work_dir, + build_plan=build_plan, + provider_name=provider_name, + install_snap=install_snap, + ) + self._cache_path: pathlib.Path | None = None + self._lock: io.TextIOBase | None = None + def setup(self) -> None: """Set up the provider service for Charmcraft.""" super().setup() @@ -56,12 +93,65 @@ def get_base( If no cache_path is included, adds one. """ + self._cache_path = cast( + pathlib.Path, kwargs.get("cache_path", env.get_host_shared_cache_path()) + ) + self._lock = _maybe_lock_cache(self._cache_path) + # Forward the shared cache path. 
- if "cache_path" not in kwargs: - kwargs["cache_path"] = env.get_host_shared_cache_path() + kwargs["cache_path"] = self._cache_path if self._lock else None return super().get_base( base_name, instance_name=instance_name, # craft-application annotation is incorrect **kwargs, # type: ignore[arg-type] ) + + @contextlib.contextmanager + def instance( + self, + build_info: BuildInfo, + *, + work_dir: pathlib.Path, + allow_unstable: bool = True, + **kwargs: bool | str | None, + ) -> Generator[craft_providers.Executor, None, None]: + """Instance override for Charmcraft.""" + with super().instance( + build_info, + work_dir=work_dir, + allow_unstable=allow_unstable, + **kwargs, # type: ignore[arg-type] + ) as instance: + try: + yield instance + finally: + if fcntl is not None and self._lock: + fcntl.flock(self._lock, fcntl.LOCK_UN) + self._lock.close() + + +def _maybe_lock_cache(path: pathlib.Path) -> io.TextIOBase | None: + """Lock the cache so we only have one copy of Charmcraft using it at a time.""" + if fcntl is None: # Don't lock on Windows - just don't cache. + return None + cache_lock_path = path / "charmcraft.lock" + + emit.trace("Attempting to lock the cache path") + lock_file = cache_lock_path.open("w+") + try: + # Exclusive lock, but non-blocking. + fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB) + except OSError: + emit.progress( + "Shared cache locked by another process; running without cache.", + permanent=True, + ) + return None + else: + pid = str(os.getpid()) + lock_file.write(pid) + lock_file.flush() + os.fsync(lock_file.fileno()) + emit.trace(f"Cache path locked by this process ({pid})") + return lock_file diff --git a/charmcraft/services/remotebuild.py b/charmcraft/services/remotebuild.py index b537a72ca..9d45da75e 100644 --- a/charmcraft/services/remotebuild.py +++ b/charmcraft/services/remotebuild.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Charmcraft-specific overrides for the remote build service.""" + import datetime import pathlib from collections.abc import Mapping diff --git a/charmcraft/services/store.py b/charmcraft/services/store.py index 90334fb0f..f5c562504 100644 --- a/charmcraft/services/store.py +++ b/charmcraft/services/store.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Service class for store interaction.""" + from __future__ import annotations import platform @@ -207,17 +208,23 @@ def set_resource_revisions_architectures( *( models.CharmResourceRevisionUpdateRequest( revision=revision, - bases=[models.RequestCharmResourceBase(architectures=architectures)], + bases=[ + models.RequestCharmResourceBase(architectures=architectures) + ], ) for revision, architectures in updates.items() ), name=name, resource_name=resource_name, ) - new_revisions = self.client.list_resource_revisions(name=name, resource_name=resource_name) + new_revisions = self.client.list_resource_revisions( + name=name, resource_name=resource_name + ) return [rev for rev in new_revisions if int(rev.revision) in updates] - def get_libraries_metadata(self, libraries: Sequence[CharmLib]) -> Sequence[Library]: + def get_libraries_metadata( + self, libraries: Sequence[CharmLib] + ) -> Sequence[Library]: """Get the metadata for one or more charm libraries. :param libraries: A sequence of libraries to request. 
@@ -249,7 +256,12 @@ def get_libraries_metadata_by_name( } def get_library( - self, charm_name: str, *, library_id: str, api: int | None = None, patch: int | None = None + self, + charm_name: str, + *, + library_id: str, + api: int | None = None, + patch: int | None = None, ) -> Library: """Get a library by charm name and ID from charmhub.""" return self.anonymous_client.get_library( diff --git a/charmcraft/store/__init__.py b/charmcraft/store/__init__.py index 5bf9cc4d4..dd8362fe8 100644 --- a/charmcraft/store/__init__.py +++ b/charmcraft/store/__init__.py @@ -18,22 +18,12 @@ from charmcraft.store.client import build_user_agent, AnonymousClient, Client from charmcraft.store import models from charmcraft.store.models import LibraryMetadataRequest -from charmcraft.store.registry import ( - OCIRegistry, - HashingTemporaryFile, - LocalDockerdInterface, - ImageHandler, -) from charmcraft.store.store import Store, AUTH_DEFAULT_TTL, AUTH_DEFAULT_PERMISSIONS __all__ = [ "build_user_agent", "AnonymousClient", "Client", - "OCIRegistry", - "HashingTemporaryFile", - "ImageHandler", - "LocalDockerdInterface", "AUTH_DEFAULT_PERMISSIONS", "AUTH_DEFAULT_TTL", "Store", diff --git a/charmcraft/store/client.py b/charmcraft/store/client.py index 3bcbce428..006fdb5df 100644 --- a/charmcraft/store/client.py +++ b/charmcraft/store/client.py @@ -39,7 +39,9 @@ def build_user_agent(): """Build the charmcraft's user agent.""" - if any(key.startswith(prefix) for prefix in TESTING_ENV_PREFIXES for key in os.environ): + if any( + key.startswith(prefix) for prefix in TESTING_ENV_PREFIXES for key in os.environ + ): testing = " (testing) " else: testing = " " @@ -53,15 +55,23 @@ class AnonymousClient: def __init__(self, api_base_url: str, storage_base_url: str): self.api_base_url = api_base_url.rstrip("/") self.storage_base_url = storage_base_url.rstrip("/") - self._http_client = craft_store.http_client.HTTPClient(user_agent=build_user_agent()) + self._http_client = craft_store.http_client.HTTPClient( + user_agent=build_user_agent() + ) def request_urlpath_text(self, method: str, urlpath: str, *args, **kwargs) -> str: """Return a request.Response to a urlpath.""" - return self._http_client.request(method, self.api_base_url + urlpath, *args, **kwargs).text + return self._http_client.request( + method, self.api_base_url + urlpath, *args, **kwargs + ).text - def request_urlpath_json(self, method: str, urlpath: str, *args, **kwargs) -> dict[str, Any]: + def request_urlpath_json( + self, method: str, urlpath: str, *args, **kwargs + ) -> dict[str, Any]: """Return .json() from a request.Response to a urlpath.""" - response = self._http_client.request(method, self.api_base_url + urlpath, *args, **kwargs) + response = self._http_client.request( + method, self.api_base_url + urlpath, *args, **kwargs + ) try: return response.json() @@ -71,7 +81,12 @@ def request_urlpath_json(self, method: str, urlpath: str, *args, **kwargs) -> di ) from json_error def get_library( - self, *, charm_name: str, library_id: str, api: int | None = None, patch: int | None = None + self, + *, + charm_name: str, + library_id: str, + api: int | None = None, + patch: int | None = None, ) -> Library: """Fetch a library attached to a charm. 
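As a usage note, the `get_library` helpers reformatted above select a library entirely through keyword arguments; a call against the anonymous client might look like the following sketch (endpoints and identifiers are placeholder values):

```python
# Placeholder endpoints and identifiers; this only illustrates the call style.
from charmcraft.store import AnonymousClient

client = AnonymousClient(
    "https://api.charmhub.io",      # api_base_url (example value)
    "https://storage.charmhub.io",  # storage_base_url (example value)
)
library = client.get_library(
    charm_name="example-charm",
    library_id="0123456789abcdef0123456789abcdef",
    api=0,  # optional: pin the major API version; patch defaults to the latest
)
```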
@@ -100,10 +115,13 @@ def fetch_libraries_metadata( emit.trace( f"Fetching library metadata from charmhub: {libs}", ) - response = self.request_urlpath_json("POST", "/v1/charm/libraries/bulk", json=libs) + response = self.request_urlpath_json( + "POST", "/v1/charm/libraries/bulk", json=libs + ) if "libraries" not in response: raise CraftError( - "Server returned invalid response while querying libraries", details=str(response) + "Server returned invalid response while querying libraries", + details=str(response), ) converted_response = [Library.from_dict(lib) for lib in response["libraries"]] emit.trace(f"Store response: {converted_response}") @@ -130,7 +148,9 @@ def __init__( Supports both charmcraft 2.x style init and compatibility with upstream. """ if base_url and api_base_url or not base_url and not api_base_url: - raise ValueError("Either base_url or api_base_url must be set, but not both.") + raise ValueError( + "Either base_url or api_base_url must be set, but not both." + ) if base_url: api_base_url = base_url self.api_base_url = api_base_url.rstrip("/") @@ -166,9 +186,13 @@ def logout(self, *args, **kwargs): def request_urlpath_text(self, method: str, urlpath: str, *args, **kwargs) -> str: """Return a request.Response to a urlpath.""" - return super().request(method, self.api_base_url + urlpath, *args, **kwargs).text + return ( + super().request(method, self.api_base_url + urlpath, *args, **kwargs).text + ) - def request_urlpath_json(self, method: str, urlpath: str, *args, **kwargs) -> dict[str, Any]: + def request_urlpath_json( + self, method: str, urlpath: str, *args, **kwargs + ) -> dict[str, Any]: """Return .json() from a request.Response to a urlpath.""" response = super().request(method, self.api_base_url + urlpath, *args, **kwargs) @@ -190,7 +214,9 @@ def push_file(self, filepath) -> str: # create a monitor (so that progress can be displayed) as call the real pusher monitor = MultipartEncoderMonitor(encoder) - with emit.progress_bar("Uploading...", monitor.len, delta=False) as progress: + with emit.progress_bar( + "Uploading...", monitor.len, delta=False + ) as progress: monitor.callback = lambda mon: progress.advance(mon.bytes_read) response = self._storage_push(monitor) @@ -207,6 +233,9 @@ def _storage_push(self, monitor) -> requests.Response: return super().request( "POST", self.storage_base_url + "/unscanned-upload/", - headers={"Content-Type": monitor.content_type, "Accept": "application/json"}, + headers={ + "Content-Type": monitor.content_type, + "Accept": "application/json", + }, data=monitor, ) diff --git a/charmcraft/store/registry.py b/charmcraft/store/registry.py deleted file mode 100644 index 53d8027b6..000000000 --- a/charmcraft/store/registry.py +++ /dev/null @@ -1,463 +0,0 @@ -# Copyright 2021-2022 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# For further info, check https://github.com/canonical/charmcraft - -"""Module to work with OCI registries.""" - -import base64 -import gzip -import hashlib -import io -import json -import os -import tarfile -import tempfile -from typing import Any -from urllib.request import parse_http_list, parse_keqv_list - -import requests -import requests_unixsocket # type: ignore[import-untyped] -from craft_cli import CraftError, emit - -# some mimetypes -CONFIG_MIMETYPE = "application/vnd.docker.container.image.v1+json" -MANIFEST_V2_MIMETYPE = "application/vnd.docker.distribution.manifest.v2+json" -LAYER_MIMETYPE = "application/vnd.docker.image.rootfs.diff.tar.gzip" -JSON_RELATED_MIMETYPES = { - "application/json", - "application/vnd.docker.distribution.manifest.v1+prettyjws", # signed manifest - MANIFEST_V2_MIMETYPE, -} -OCTET_STREAM_MIMETYPE = "application/octet-stream" - -# downloads and uploads happen in chunks; this size is mostly driven by the usage in the upload -# blob, where the cost in time is similar for small and large chunks (we need to balance having -# it large enough for speed, but not too large because of memory consumption) -CHUNK_SIZE = 2**20 - - -def assert_response_ok( - response: requests.Response, expected_status: int = 200 -) -> dict[str, Any] | None: - """Assert the response is ok.""" - if response.status_code != expected_status: - ct = response.headers.get("Content-Type", "") - if ct.split(";")[0] in JSON_RELATED_MIMETYPES: - errors = response.json().get("errors") - else: - errors = None - raise CraftError( - "Wrong status code from server " - f"(expected={expected_status}, got={response.status_code})", - details=f"errors={errors} headers={response.headers}", - ) - - if response.headers.get("Content-Type") not in JSON_RELATED_MIMETYPES: - return None - - result = response.json() - if "errors" in result: - raise CraftError("Response with errors from server: {}".format(result["errors"])) - return result - - -class OCIRegistry: - """Interface to a generic OCI Registry.""" - - def __init__(self, server, image_name, *, username="", password=""): - self.server = server - self.image_name = image_name - self.auth_token = None - - if username: - _u_p = f"{username}:{password}" - self.auth_encoded_credentials = base64.b64encode(_u_p.encode("ascii")).decode("ascii") - else: - self.auth_encoded_credentials = None - - def __eq__(self, other): - return ( - self.server == other.server - and self.image_name == other.image_name - and self.auth_encoded_credentials == other.auth_encoded_credentials - ) - - def _authenticate(self, auth_info): - """Get the auth token.""" - headers = {} - if self.auth_encoded_credentials is not None: - headers["Authorization"] = f"Basic {self.auth_encoded_credentials}" - - emit.trace(f"Authenticating! 
{auth_info}") - url = "{realm}?service={service}&scope={scope}".format_map(auth_info) - response = requests.get(url, headers=headers) - - result = assert_response_ok(response) - return result["token"] - - def _get_url(self, subpath): - """Build the URL completing the subpath.""" - return f"{self.server}/v2/{self.image_name}/{subpath}" - - def _get_auth_info(self, response): - """Parse a 401 response and get the needed auth parameters.""" - www_auth = response.headers["Www-Authenticate"] - if not www_auth.startswith("Bearer "): - raise ValueError("Bearer not found") - return parse_keqv_list(parse_http_list(www_auth[7:])) - - def _hit(self, method, url, headers=None, log=True, **kwargs): - """Hit the specific URL, taking care of the authentication.""" - if headers is None: - headers = {} - if self.auth_token is not None: - headers["Authorization"] = f"Bearer {self.auth_token}" - - if log: - emit.trace(f"Hitting the registry: {method} {url}") - response = requests.request(method, url, headers=headers, **kwargs) - if response.status_code == 401: - # token expired or missing, let's get another one and retry - try: - auth_info = self._get_auth_info(response) - except (ValueError, KeyError) as exc: - raise CraftError(f"Bad 401 response: {exc}; headers: {response.headers!r}") - self.auth_token = self._authenticate(auth_info) - headers["Authorization"] = f"Bearer {self.auth_token}" - response = requests.request(method, url, headers=headers, **kwargs) - - return response - - def _is_item_already_uploaded(self, url): - """Verify if a generic item is uploaded.""" - response = self._hit("HEAD", url) - - if response.status_code == 200: - # item is there, done! - uploaded = True - elif response.status_code == 404: - # confirmed item is NOT there - uploaded = False - else: - # something else is going on, log what we have and return False so at least - # we can continue with the upload - emit.debug( - f"Bad response when checking for uploaded {url!r}: " - f"{response.status_code!r} (headers={response.headers})", - ) - uploaded = False - return uploaded - - def is_manifest_already_uploaded(self, reference): - """Verify if the manifest is already uploaded, using a generic reference. - - If yes, return its digest. - """ - emit.progress("Checking if manifest is already uploaded") - url = self._get_url(f"manifests/{reference}") - return self._is_item_already_uploaded(url) - - def is_blob_already_uploaded(self, reference): - """Verify if the blob is already uploaded, using a generic reference. - - If yes, return its digest. 
- """ - emit.progress("Checking if the blob is already uploaded") - url = self._get_url(f"blobs/{reference}") - return self._is_item_already_uploaded(url) - - def upload_manifest(self, manifest_data, reference): - """Upload a manifest.""" - url = self._get_url(f"manifests/{reference}") - headers = { - "Content-Type": MANIFEST_V2_MIMETYPE, - } - emit.progress(f"Uploading manifest with reference {reference}") - response = self._hit("PUT", url, headers=headers, data=manifest_data.encode("utf8")) - assert_response_ok(response, expected_status=201) - emit.progress("Manifest uploaded OK") - - def upload_blob(self, filepath, size, digest): - """Upload the blob from a file.""" - # get the first URL to start pushing the blob - emit.progress("Getting URL to push the blob") - url = self._get_url("blobs/uploads/") - response = self._hit("POST", url) - assert_response_ok(response, expected_status=202) - upload_url = response.headers["Location"] - range_from, range_to_inclusive = (int(x) for x in response.headers["Range"].split("-")) - emit.progress(f"Got upload URL ok with range {range_from}-{range_to_inclusive}") - if range_from != 0: - raise CraftError( - "Server error: bad range received", details=f"Range={response.headers['Range']!r}" - ) - - # this `range_to_inclusive` alteration is a side effect of the range being inclusive. The - # server tells us that it already has "0-80", means that it has 81 bytes (from 0 to 80 - # inclusive), we set from_position in 81 and read from there. Going down, "0-1" would mean - # it has bytes 0 and 1; But "0-0" is special, it's what the server returns when it does - # not have ANY bytes at all. So we comply with Range parameter, but addressing this - # special case; worst think it could happen is that we start from 0 when the server - # has 1 byte already, which is not a problem. 
- if range_to_inclusive == 0: - range_to_inclusive = -1 - from_position = range_to_inclusive + 1 - - # start the chunked upload - with open(filepath, "rb") as fh: - with emit.progress_bar("Uploading...", size) as progress: - if from_position: - fh.seek(from_position) - progress.advance(from_position) - - while True: - chunk = fh.read(CHUNK_SIZE) - if not chunk: - break - - progress.advance(len(chunk)) - end_position = from_position + len(chunk) - headers = { - "Content-Length": str(len(chunk)), - "Content-Range": f"{from_position}-{end_position}", - "Content-Type": OCTET_STREAM_MIMETYPE, - } - response = self._hit( - "PATCH", upload_url, headers=headers, data=chunk, log=False - ) - assert_response_ok(response, expected_status=202) - - upload_url = response.headers["Location"] - from_position += len(chunk) - headers = { - "Content-Length": "0", - "Connection": "close", - } - emit.progress("Closing the upload") - closing_url = f"{upload_url}&digest={digest}" - - response = self._hit("PUT", closing_url, headers=headers, data="") - assert_response_ok(response, expected_status=201) - emit.progress("Upload finished OK") - if response.headers["Docker-Content-Digest"] != digest: - raise CraftError("Server error: the upload is corrupted") - - -class HashingTemporaryFile(io.FileIO): - """A temporary file that keeps the hash and length of what is written.""" - - def __init__(self): - tmp_file = tempfile.NamedTemporaryFile(mode="wb", delete=False) - self.file_handler = tmp_file.file - super().__init__(tmp_file.name, mode="wb") - self.total_length = 0 - self.hasher = hashlib.sha256() - - @property - def hexdigest(self): - """Calculate the digest.""" - return self.hasher.hexdigest() - - def write(self, data): - """Intercept real write to feed hasher and length count.""" - self.total_length += len(data) - self.hasher.update(data) - super().write(data) - - -class LocalDockerdInterface: - """Functionality to interact with a local Docker daemon.""" - - # the address of the dockerd socket - dockerd_socket_baseurl = "http+unix://%2Fvar%2Frun%2Fdocker.sock" - - def __init__(self): - self.session = requests_unixsocket.Session() - - def get_image_info_from_id(self, image_id: str) -> dict | None: - """Get the info for a specific image using its id. - - Returns None to flag that the requested id was not found for any reason. - """ - url = self.dockerd_socket_baseurl + f"/images/{image_id}/json" - try: - response = self.session.get(url) - except requests.exceptions.ConnectionError: - emit.debug( - "Cannot connect to /var/run/docker.sock , please ensure dockerd is running.", - ) - return None - - if response.status_code == 200: - # image is there, we're fine - return response.json() - - # 404 is the standard response to "not found", if not exactly that let's log - # for proper debugging - if response.status_code != 404: - emit.debug(f"Bad response when validating local image: {response.status_code}") - return None - return None - - def get_image_info_from_digest(self, digest: str) -> dict | None: - """Get the info for a specific image using its digest. - - Returns None to flag that the requested digest was not found for any reason. 
- """ - url = self.dockerd_socket_baseurl + "/images/json" - try: - response = self.session.get(url) - except requests.exceptions.ConnectionError: - emit.debug( - "Cannot connect to /var/run/docker.sock , please ensure dockerd is running.", - ) - return None - - if response.status_code != 200: - emit.debug(f"Bad response when validating local image: {response.status_code}") - return None - - for image_info in response.json(): - if image_info["RepoDigests"] is None: - continue - if any(digest in repo_digest for repo_digest in image_info["RepoDigests"]): - return image_info - return None - - def get_streamed_image_content(self, image_id: str) -> requests.Response: - """Stream the content of a specific image.""" - url = self.dockerd_socket_baseurl + f"/images/{image_id}/get" - return self.session.get(url, stream=True) - - -class ImageHandler: - """Provide specific functionalities around images.""" - - def __init__(self, registry): - self.registry = registry - - def check_in_registry(self, digest: str) -> bool: - """Verify if the image is present in the registry.""" - return self.registry.is_manifest_already_uploaded(digest) - - def _extract_file( - self, image_tar: str, name: str, compress: bool = False - ) -> tuple[str, int, str]: - """Extract a file from the tar and return its info. Optionally, gzip the content.""" - emit.progress(f"Extracting file {name!r} from local tar (compress={compress})") - src_filehandler = image_tar.extractfile(name) - mtime = image_tar.getmember(name).mtime - - hashing_temp_file = HashingTemporaryFile() - if compress: - # open the gzip file using the temporary file handler; use the original name and time - # as 'filename' and 'mtime' correspondingly as those go to the gzip headers, - # to ensure same final hash across different runs - dst_filehandler = gzip.GzipFile( - fileobj=hashing_temp_file, - mode="wb", - filename=os.path.basename(name), - mtime=mtime, - ) - else: - dst_filehandler = hashing_temp_file - try: - while True: - chunk = src_filehandler.read(CHUNK_SIZE) - if not chunk: - break - dst_filehandler.write(chunk) - finally: - dst_filehandler.close() - # gzip does not automatically close the underlying file handler, let's do it manually - hashing_temp_file.close() - - digest = f"sha256:{hashing_temp_file.hexdigest}" - return hashing_temp_file.name, hashing_temp_file.total_length, digest - - def _upload_blob(self, filepath: str, size: int, digest: str) -> None: - """Upload the blob (if necessary).""" - # if it's already uploaded, nothing to do - if self.registry.is_blob_already_uploaded(digest): - emit.progress("Blob was already uploaded") - else: - self.registry.upload_blob(filepath, size, digest) - - # finally remove the temp filepath - os.unlink(filepath) - - def upload_from_local(self, image_info: dict[str, Any]) -> str | None: - """Upload the image from the local registry. - - Returns the new remote digest. 
- """ - dockerd = LocalDockerdInterface() - local_image_size = image_info["Size"] - image_id = image_info["Id"] - - emit.progress(f"Getting the image from the local repo; size={local_image_size}") - response = dockerd.get_streamed_image_content(image_id) - - tmp_exported = tempfile.NamedTemporaryFile(mode="wb", delete=False) - with emit.progress_bar("Reading image...", local_image_size) as progress: - for chunk in response.iter_content(CHUNK_SIZE): - progress.advance(len(chunk)) - tmp_exported.file.write(chunk) - tmp_exported.close() - - # open the image tar and inspect it to get the config and layers from the only one - # manifest inside (as it's a list of one) - image_tar = tarfile.open(tmp_exported.name) - local_manifest = json.load(image_tar.extractfile("manifest.json")) - (local_manifest,) = local_manifest - config_name = local_manifest.get("Config") - layer_names = local_manifest["Layers"] - manifest = { - "mediaType": MANIFEST_V2_MIMETYPE, - "schemaVersion": 2, - } - - if config_name is not None: - fpath, size, digest = self._extract_file(image_tar, config_name) - emit.progress(f"Uploading config blob, size={size}, digest={digest}") - self._upload_blob(fpath, size, digest) - manifest["config"] = { - "digest": digest, - "mediaType": CONFIG_MIMETYPE, - "size": size, - } - - manifest["layers"] = manifest_layers = [] - len_layers = len(layer_names) - for idx, layer_name in enumerate(layer_names, 1): - fpath, size, digest = self._extract_file(image_tar, layer_name, compress=True) - emit.progress(f"Uploading layer blob {idx}/{len_layers}, size={size}, digest={digest}") - self._upload_blob(fpath, size, digest) - manifest_layers.append( - { - "digest": digest, - "mediaType": LAYER_MIMETYPE, - "size": size, - } - ) - - # remove the temp tar file - os.unlink(tmp_exported.name) - - # upload the manifest - manifest_data = json.dumps(manifest) - digest = "sha256:{}".format(hashlib.sha256(manifest_data.encode("utf8")).hexdigest()) - self.registry.upload_manifest(manifest_data, digest) - return digest diff --git a/charmcraft/store/store.py b/charmcraft/store/store.py index 2e8517734..1f2e44fd0 100644 --- a/charmcraft/store/store.py +++ b/charmcraft/store/store.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """The Store API handling.""" + import os import pathlib import platform @@ -155,8 +156,12 @@ def error_decorator(self, *args, **kwargs): "Regenerate them and try again." ) if not auto_login: - raise CraftError("Existing credentials are no longer valid for Charmhub.") - emit.progress("Existing credentials no longer valid. Trying to log in...") + raise CraftError( + "Existing credentials are no longer valid for Charmhub." + ) + emit.progress( + "Existing credentials no longer valid. Trying to log in..." 
+ ) # Clear credentials before trying to login again self.logout() else: @@ -178,14 +183,20 @@ def __init__(self, charmhub_config, ephemeral=False, needs_auth=True): if needs_auth: try: self._client = Client( - charmhub_config.api_url, charmhub_config.storage_url, ephemeral=ephemeral + charmhub_config.api_url, + charmhub_config.storage_url, + ephemeral=ephemeral, ) except craft_store.errors.NoKeyringError as error: raise CraftError(str(error)) from error else: - self._client = AnonymousClient(charmhub_config.api_url, charmhub_config.storage_url) + self._client = AnonymousClient( + charmhub_config.api_url, charmhub_config.storage_url + ) - def login(self, permissions=None, ttl=None, charms=None, bundles=None, channels=None): + def login( + self, permissions=None, ttl=None, charms=None, bundles=None, channels=None + ): """Login into the store.""" hostname = _get_hostname() # Used to identify the login on Ubuntu SSO to ease future revokations. @@ -201,11 +212,13 @@ def login(self, permissions=None, ttl=None, charms=None, bundles=None, channels= packages = [] if charms is not None: packages.extend( - endpoints.Package(package_type="charm", package_name=charm) for charm in charms + endpoints.Package(package_type="charm", package_name=charm) + for charm in charms ) if bundles is not None: packages.extend( - endpoints.Package(package_type="bundle", package_name=bundle) for bundle in bundles + endpoints.Package(package_type="bundle", package_name=bundle) + for bundle in bundles ) if packages: kwargs["packages"] = packages @@ -233,7 +246,9 @@ def whoami(self): response = self._client.whoami() acc = response["account"] - account = Account(name=acc["display-name"], username=acc["username"], id=acc["id"]) + account = Account( + name=acc["display-name"], username=acc["username"], id=acc["id"] + ) if response["packages"] is None: packages = None else: @@ -352,11 +367,15 @@ def upload_resource( @_store_client_wrapper() def list_revisions(self, name): """Return charm revisions for the indicated charm.""" - response = self._client.request_urlpath_json("GET", f"/v1/charm/{name}/revisions") + response = self._client.request_urlpath_json( + "GET", f"/v1/charm/{name}/revisions" + ) return [_build_revision(item) for item in response["revisions"]] @_store_client_wrapper() - def release(self, name: str, revision: int, channels: list[str], resources) -> dict[str, Any]: + def release( + self, name: str, revision: int, channels: list[str], resources + ) -> dict[str, Any]: """Release one or more revisions for a package.""" endpoint = f"/v1/charm/{name}/releases" resources = [{"name": res.name, "revision": res.revision} for res in resources] @@ -368,7 +387,9 @@ def release(self, name: str, revision: int, channels: list[str], resources) -> d return self._client.request_urlpath_json("POST", endpoint, json=items) @_store_client_wrapper() - def list_releases(self, name: str) -> tuple[list[Release], list[Channel], list[Revision]]: + def list_releases( + self, name: str + ) -> tuple[list[Release], list[Channel], list[Revision]]: """List current releases for a package.""" endpoint = f"/v1/charm/{name}/releases" response = self._client.request_urlpath_json("GET", endpoint) @@ -416,7 +437,9 @@ def create_library_id(self, charm_name, lib_name): return response["library-id"] @_store_client_wrapper() - def create_library_revision(self, charm_name, lib_id, api, patch, content, content_hash): + def create_library_revision( + self, charm_name, lib_id, api, patch, content, content_hash + ): """Create a new library revision.""" 
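For context, the `login()` call reflowed above turns plain charm and bundle names into `craft_store` package entries before requesting credentials; the expansion reduces to the sketch below (the package names and the `store` instance are placeholders):

```python
# Illustrative only: how login() above expands package names for craft_store.
from craft_store import endpoints

charms = ["example-charm"]      # placeholder names
bundles = ["example-bundle"]
packages = [
    endpoints.Package(package_type="charm", package_name=name) for name in charms
] + [
    endpoints.Package(package_type="bundle", package_name=name) for name in bundles
]
# A Store instance (here named `store`) accepts the same names directly:
# store.login(charms=charms, bundles=bundles)
```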
endpoint = f"/v1/charm/libraries/{charm_name}/{lib_id}" payload = { @@ -462,12 +485,17 @@ def get_libraries_tips(self, libraries): payload.append(item) response = self._client.request_urlpath_json("POST", endpoint, json=payload) libraries = response["libraries"] - return {(item["library-id"], item["api"]): _build_library(item) for item in libraries} + return { + (item["library-id"], item["api"]): _build_library(item) + for item in libraries + } @_store_client_wrapper() def list_resources(self, charm): """Return resources associated to the indicated charm.""" - response = self._client.request_urlpath_json("GET", f"/v1/charm/{charm}/resources") + response = self._client.request_urlpath_json( + "GET", f"/v1/charm/{charm}/resources" + ) return [_build_resource(item) for item in response["resources"]] @_store_client_wrapper() diff --git a/charmcraft/templates/init-django-framework/charmcraft.yaml.j2 b/charmcraft/templates/init-django-framework/charmcraft.yaml.j2 index 25632b2d9..c60140ed2 100644 --- a/charmcraft/templates/init-django-framework/charmcraft.yaml.j2 +++ b/charmcraft/templates/init-django-framework/charmcraft.yaml.j2 @@ -51,3 +51,7 @@ extensions: # interface: saml # optional: false # limit: 1 +# rabbitmq: +# interface: rabbitmq +# optional: false +# limit: 1 diff --git a/charmcraft/templates/init-django-framework/pyproject.toml.j2 b/charmcraft/templates/init-django-framework/pyproject.toml.j2 new file mode 100644 index 000000000..3cb1ce223 --- /dev/null +++ b/charmcraft/templates/init-django-framework/pyproject.toml.j2 @@ -0,0 +1,41 @@ +# Testing tools configuration +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true + +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +# Linting tools configuration +[tool.ruff] +line-length = 99 +lint.select = ["E", "W", "F", "C", "N", "D", "I001"] +lint.extend-ignore = [ + "D105", + "D107", + "D203", + "D204", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", +] +extend-exclude = ["__pycache__", "*.egg_info"] +lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" + +[tool.pyright] +include = ["src/**.py"] diff --git a/charmcraft/templates/init-django-framework/requirements.txt.j2 b/charmcraft/templates/init-django-framework/requirements.txt.j2 index acab50eb1..d58a30c21 100644 --- a/charmcraft/templates/init-django-framework/requirements.txt.j2 +++ b/charmcraft/templates/init-django-framework/requirements.txt.j2 @@ -1 +1,2 @@ -paas-app-charmer==1.* +ops ~= 2.17 +paas-charm>=1.0,<2 diff --git a/charmcraft/templates/init-django-framework/src/charm.py.j2 b/charmcraft/templates/init-django-framework/src/charm.py.j2 index d31095a16..359b47307 100755 --- a/charmcraft/templates/init-django-framework/src/charm.py.j2 +++ b/charmcraft/templates/init-django-framework/src/charm.py.j2 @@ -9,12 +9,12 @@ import typing import ops -import paas_app_charmer.django +import paas_charm.django logger = logging.getLogger(__name__) -class {{ class_name }}(paas_app_charmer.django.Charm): +class {{ class_name }}(paas_charm.django.Charm): """Django Charm service.""" def __init__(self, *args: typing.Any) -> None: @@ -27,4 +27,4 @@ class {{ class_name }}(paas_app_charmer.django.Charm): if __name__ == "__main__": - ops.main.main({{ class_name }}) + ops.main({{ class_name }}) diff --git 
a/charmcraft/templates/init-django-framework/tox.ini.j2 b/charmcraft/templates/init-django-framework/tox.ini.j2 new file mode 100644 index 000000000..351f36085 --- /dev/null +++ b/charmcraft/templates/init-django-framework/tox.ini.j2 @@ -0,0 +1,84 @@ +# Copyright {{ year }} {{ author }} +# See LICENSE file for licensing details. + +[tox] +no_package = True +skip_missing_interpreters = True +env_list = format, lint, static +min_version = 4.0.0 + +[vars] +src_path = {tox_root}/src +;tests_path = {tox_root}/tests +;lib_path = {tox_root}/lib/charms/operator_name_with_underscores +all_path = {[vars]src_path} + +[testenv] +set_env = + PYTHONPATH = {tox_root}/lib:{[vars]src_path} + PYTHONBREAKPOINT=pdb.set_trace + PY_COLORS=1 +pass_env = + PYTHONPATH + CHARM_BUILD_DIR + MODEL_SETTINGS + +[testenv:format] +description = Apply coding style standards to code +deps = + ruff +commands = + ruff format {[vars]all_path} + ruff check --fix {[vars]all_path} + +[testenv:lint] +description = Check code against coding style standards +deps = + ruff + codespell +commands = + # if this charm owns a lib, uncomment "lib_path" variable + # and uncomment the following line + # codespell {[vars]lib_path} + codespell {tox_root} + ruff check {[vars]all_path} + ruff format --check --diff {[vars]all_path} + +[testenv:unit] +description = Run unit tests +deps = + pytest + coverage[toml] + -r {tox_root}/requirements.txt +commands = + coverage run --source={[vars]src_path} \ + -m pytest \ + --tb native \ + -v \ + -s \ + {posargs} \ + {[vars]tests_path}/unit + coverage report + +[testenv:static] +description = Run static type checks +deps = + pyright + -r {tox_root}/requirements.txt +commands = + pyright {posargs} + +[testenv:integration] +description = Run integration tests +deps = + pytest + juju + pytest-operator + -r {tox_root}/requirements.txt +commands = + pytest -v \ + -s \ + --tb native \ + --log-cli-level=INFO \ + {posargs} \ + {[vars]tests_path}/integration diff --git a/charmcraft/templates/init-fastapi-framework/.gitignore.j2 b/charmcraft/templates/init-fastapi-framework/.gitignore.j2 new file mode 100644 index 000000000..a26d707f9 --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/.gitignore.j2 @@ -0,0 +1,9 @@ +venv/ +build/ +*.charm +.tox/ +.coverage +__pycache__/ +*.py[cod] +.idea +.vscode/ diff --git a/charmcraft/templates/init-fastapi-framework/charmcraft.yaml.j2 b/charmcraft/templates/init-fastapi-framework/charmcraft.yaml.j2 new file mode 100644 index 000000000..a8b24a74a --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/charmcraft.yaml.j2 @@ -0,0 +1,59 @@ +# This file configures Charmcraft. +# See https://juju.is/docs/sdk/charmcraft-config for guidance. + +name: {{ name }} + +type: charm + +base: ubuntu@24.04 + +# the platforms this charm should be built on and run on. +# you can check your architecture with `dpkg --print-architecture` +platforms: + amd64: + # arm64: + # ppc64el: + # s390x: + +# (Required) +summary: A very short one-line summary of the FastAPI application. + +# (Required) +description: | + A comprehensive overview of your FastAPI application. + +extensions: + - fastapi-framework + +# Uncomment the integrations used by your application +# Integrations set to "optional: false" will block the charm +# until the applications are integrated. 
+# requires: +# mysql: +# interface: mysql_client +# optional: false +# limit: 1 +# postgresql: +# interface: postgresql_client +# optional: false +# limit: 1 +# mongodb: +# interface: mongodb_client +# optional: false +# limit: 1 +# redis: +# interface: redis +# optional: false +# limit: 1 +# s3: +# interface: s3 +# optional: false +# limit: 1 +# saml: +# interface: saml +# optional: false +# limit: 1 +# rabbitmq: +# interface: rabbitmq +# optional: false +# limit: 1 diff --git a/charmcraft/templates/init-fastapi-framework/pyproject.toml.j2 b/charmcraft/templates/init-fastapi-framework/pyproject.toml.j2 new file mode 100644 index 000000000..3cb1ce223 --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/pyproject.toml.j2 @@ -0,0 +1,41 @@ +# Testing tools configuration +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true + +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +# Linting tools configuration +[tool.ruff] +line-length = 99 +lint.select = ["E", "W", "F", "C", "N", "D", "I001"] +lint.extend-ignore = [ + "D105", + "D107", + "D203", + "D204", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", +] +extend-exclude = ["__pycache__", "*.egg_info"] +lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" + +[tool.pyright] +include = ["src/**.py"] diff --git a/charmcraft/templates/init-fastapi-framework/requirements.txt.j2 b/charmcraft/templates/init-fastapi-framework/requirements.txt.j2 new file mode 100644 index 000000000..d58a30c21 --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/requirements.txt.j2 @@ -0,0 +1,2 @@ +ops ~= 2.17 +paas-charm>=1.0,<2 diff --git a/charmcraft/templates/init-fastapi-framework/src/charm.py.j2 b/charmcraft/templates/init-fastapi-framework/src/charm.py.j2 new file mode 100755 index 000000000..84f9fa77a --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/src/charm.py.j2 @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# Copyright {{ year }} {{ author }} +# See LICENSE file for licensing details. + +"""FastAPI Charm entrypoint.""" + +import logging +import typing + +import ops + +import paas_charm.fastapi + +logger = logging.getLogger(__name__) + + +class {{ class_name }}(paas_charm.fastapi.Charm): + """FastAPI Charm service.""" + + def __init__(self, *args: typing.Any) -> None: + """Initialize the instance. + + Args: + args: passthrough to CharmBase. + """ + super().__init__(*args) + + +if __name__ == "__main__": + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-fastapi-framework/tox.ini.j2 b/charmcraft/templates/init-fastapi-framework/tox.ini.j2 new file mode 100644 index 000000000..351f36085 --- /dev/null +++ b/charmcraft/templates/init-fastapi-framework/tox.ini.j2 @@ -0,0 +1,84 @@ +# Copyright {{ year }} {{ author }} +# See LICENSE file for licensing details. 
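Stepping back from the individual templates: together with the new `ops ~= 2.17` requirement, every framework entrypoint now imports `paas_charm` (formerly `paas_app_charmer`) and calls `ops.main(...)` directly instead of `ops.main.main(...)`. A rendered entrypoint therefore reduces to roughly the following sketch (the charm class name is a placeholder for whatever `{{ class_name }}` expands to):

```python
#!/usr/bin/env python3
"""Placeholder entrypoint mirroring the updated framework templates."""

import typing

import ops
import paas_charm.fastapi  # previously: import paas_app_charmer.fastapi


class ExampleCharm(paas_charm.fastapi.Charm):
    """FastAPI charm service; ExampleCharm stands in for the rendered class name."""

    def __init__(self, *args: typing.Any) -> None:
        super().__init__(*args)


if __name__ == "__main__":
    ops.main(ExampleCharm)  # with ops ~= 2.17, ops.main is called directly
```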
+ +[tox] +no_package = True +skip_missing_interpreters = True +env_list = format, lint, static +min_version = 4.0.0 + +[vars] +src_path = {tox_root}/src +;tests_path = {tox_root}/tests +;lib_path = {tox_root}/lib/charms/operator_name_with_underscores +all_path = {[vars]src_path} + +[testenv] +set_env = + PYTHONPATH = {tox_root}/lib:{[vars]src_path} + PYTHONBREAKPOINT=pdb.set_trace + PY_COLORS=1 +pass_env = + PYTHONPATH + CHARM_BUILD_DIR + MODEL_SETTINGS + +[testenv:format] +description = Apply coding style standards to code +deps = + ruff +commands = + ruff format {[vars]all_path} + ruff check --fix {[vars]all_path} + +[testenv:lint] +description = Check code against coding style standards +deps = + ruff + codespell +commands = + # if this charm owns a lib, uncomment "lib_path" variable + # and uncomment the following line + # codespell {[vars]lib_path} + codespell {tox_root} + ruff check {[vars]all_path} + ruff format --check --diff {[vars]all_path} + +[testenv:unit] +description = Run unit tests +deps = + pytest + coverage[toml] + -r {tox_root}/requirements.txt +commands = + coverage run --source={[vars]src_path} \ + -m pytest \ + --tb native \ + -v \ + -s \ + {posargs} \ + {[vars]tests_path}/unit + coverage report + +[testenv:static] +description = Run static type checks +deps = + pyright + -r {tox_root}/requirements.txt +commands = + pyright {posargs} + +[testenv:integration] +description = Run integration tests +deps = + pytest + juju + pytest-operator + -r {tox_root}/requirements.txt +commands = + pytest -v \ + -s \ + --tb native \ + --log-cli-level=INFO \ + {posargs} \ + {[vars]tests_path}/integration diff --git a/charmcraft/templates/init-flask-framework/charmcraft.yaml.j2 b/charmcraft/templates/init-flask-framework/charmcraft.yaml.j2 index 7421920fc..888b49d25 100644 --- a/charmcraft/templates/init-flask-framework/charmcraft.yaml.j2 +++ b/charmcraft/templates/init-flask-framework/charmcraft.yaml.j2 @@ -51,3 +51,7 @@ extensions: # interface: saml # optional: false # limit: 1 +# rabbitmq: +# interface: rabbitmq +# optional: false +# limit: 1 diff --git a/charmcraft/templates/init-flask-framework/pyproject.toml.j2 b/charmcraft/templates/init-flask-framework/pyproject.toml.j2 new file mode 100644 index 000000000..3cb1ce223 --- /dev/null +++ b/charmcraft/templates/init-flask-framework/pyproject.toml.j2 @@ -0,0 +1,41 @@ +# Testing tools configuration +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true + +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +# Linting tools configuration +[tool.ruff] +line-length = 99 +lint.select = ["E", "W", "F", "C", "N", "D", "I001"] +lint.extend-ignore = [ + "D105", + "D107", + "D203", + "D204", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", +] +extend-exclude = ["__pycache__", "*.egg_info"] +lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" + +[tool.pyright] +include = ["src/**.py"] diff --git a/charmcraft/templates/init-flask-framework/requirements.txt.j2 b/charmcraft/templates/init-flask-framework/requirements.txt.j2 index acab50eb1..d58a30c21 100644 --- a/charmcraft/templates/init-flask-framework/requirements.txt.j2 +++ b/charmcraft/templates/init-flask-framework/requirements.txt.j2 @@ -1 +1,2 @@ -paas-app-charmer==1.* +ops ~= 2.17 +paas-charm>=1.0,<2 diff --git 
a/charmcraft/templates/init-flask-framework/src/charm.py.j2 b/charmcraft/templates/init-flask-framework/src/charm.py.j2 index b75c440a3..94cb3f33f 100755 --- a/charmcraft/templates/init-flask-framework/src/charm.py.j2 +++ b/charmcraft/templates/init-flask-framework/src/charm.py.j2 @@ -9,12 +9,12 @@ import typing import ops -import paas_app_charmer.flask +import paas_charm.flask logger = logging.getLogger(__name__) -class {{ class_name }}(paas_app_charmer.flask.Charm): +class {{ class_name }}(paas_charm.flask.Charm): """Flask Charm service.""" def __init__(self, *args: typing.Any) -> None: @@ -27,4 +27,4 @@ class {{ class_name }}(paas_app_charmer.flask.Charm): if __name__ == "__main__": - ops.main.main({{ class_name }}) + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-flask-framework/tox.ini.j2 b/charmcraft/templates/init-flask-framework/tox.ini.j2 new file mode 100644 index 000000000..351f36085 --- /dev/null +++ b/charmcraft/templates/init-flask-framework/tox.ini.j2 @@ -0,0 +1,84 @@ +# Copyright {{ year }} {{ author }} +# See LICENSE file for licensing details. + +[tox] +no_package = True +skip_missing_interpreters = True +env_list = format, lint, static +min_version = 4.0.0 + +[vars] +src_path = {tox_root}/src +;tests_path = {tox_root}/tests +;lib_path = {tox_root}/lib/charms/operator_name_with_underscores +all_path = {[vars]src_path} + +[testenv] +set_env = + PYTHONPATH = {tox_root}/lib:{[vars]src_path} + PYTHONBREAKPOINT=pdb.set_trace + PY_COLORS=1 +pass_env = + PYTHONPATH + CHARM_BUILD_DIR + MODEL_SETTINGS + +[testenv:format] +description = Apply coding style standards to code +deps = + ruff +commands = + ruff format {[vars]all_path} + ruff check --fix {[vars]all_path} + +[testenv:lint] +description = Check code against coding style standards +deps = + ruff + codespell +commands = + # if this charm owns a lib, uncomment "lib_path" variable + # and uncomment the following line + # codespell {[vars]lib_path} + codespell {tox_root} + ruff check {[vars]all_path} + ruff format --check --diff {[vars]all_path} + +[testenv:unit] +description = Run unit tests +deps = + pytest + coverage[toml] + -r {tox_root}/requirements.txt +commands = + coverage run --source={[vars]src_path} \ + -m pytest \ + --tb native \ + -v \ + -s \ + {posargs} \ + {[vars]tests_path}/unit + coverage report + +[testenv:static] +description = Run static type checks +deps = + pyright + -r {tox_root}/requirements.txt +commands = + pyright {posargs} + +[testenv:integration] +description = Run integration tests +deps = + pytest + juju + pytest-operator + -r {tox_root}/requirements.txt +commands = + pytest -v \ + -s \ + --tb native \ + --log-cli-level=INFO \ + {posargs} \ + {[vars]tests_path}/integration diff --git a/charmcraft/templates/init-go-framework/charmcraft.yaml.j2 b/charmcraft/templates/init-go-framework/charmcraft.yaml.j2 index 20e93214d..8ddd8b05c 100644 --- a/charmcraft/templates/init-go-framework/charmcraft.yaml.j2 +++ b/charmcraft/templates/init-go-framework/charmcraft.yaml.j2 @@ -53,3 +53,7 @@ extensions: # interface: saml # optional: false # limit: 1 +# rabbitmq: +# interface: rabbitmq +# optional: false +# limit: 1 diff --git a/charmcraft/templates/init-go-framework/pyproject.toml.j2 b/charmcraft/templates/init-go-framework/pyproject.toml.j2 new file mode 100644 index 000000000..3cb1ce223 --- /dev/null +++ b/charmcraft/templates/init-go-framework/pyproject.toml.j2 @@ -0,0 +1,41 @@ +# Testing tools configuration +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true 
+ +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +# Linting tools configuration +[tool.ruff] +line-length = 99 +lint.select = ["E", "W", "F", "C", "N", "D", "I001"] +lint.extend-ignore = [ + "D105", + "D107", + "D203", + "D204", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", +] +extend-exclude = ["__pycache__", "*.egg_info"] +lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" + +[tool.pyright] +include = ["src/**.py"] diff --git a/charmcraft/templates/init-go-framework/requirements.txt.j2 b/charmcraft/templates/init-go-framework/requirements.txt.j2 index acab50eb1..d58a30c21 100644 --- a/charmcraft/templates/init-go-framework/requirements.txt.j2 +++ b/charmcraft/templates/init-go-framework/requirements.txt.j2 @@ -1 +1,2 @@ -paas-app-charmer==1.* +ops ~= 2.17 +paas-charm>=1.0,<2 diff --git a/charmcraft/templates/init-go-framework/src/charm.py.j2 b/charmcraft/templates/init-go-framework/src/charm.py.j2 index de6162513..c32223b7e 100755 --- a/charmcraft/templates/init-go-framework/src/charm.py.j2 +++ b/charmcraft/templates/init-go-framework/src/charm.py.j2 @@ -9,12 +9,12 @@ import typing import ops -import paas_app_charmer.go +import paas_charm.go logger = logging.getLogger(__name__) -class {{ class_name }}(paas_app_charmer.go.Charm): +class {{ class_name }}(paas_charm.go.Charm): """Go Charm service.""" def __init__(self, *args: typing.Any) -> None: @@ -27,4 +27,4 @@ class {{ class_name }}(paas_app_charmer.go.Charm): if __name__ == "__main__": - ops.main.main({{ class_name }}) + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-go-framework/tox.ini.j2 b/charmcraft/templates/init-go-framework/tox.ini.j2 new file mode 100644 index 000000000..351f36085 --- /dev/null +++ b/charmcraft/templates/init-go-framework/tox.ini.j2 @@ -0,0 +1,84 @@ +# Copyright {{ year }} {{ author }} +# See LICENSE file for licensing details. 
+ +[tox] +no_package = True +skip_missing_interpreters = True +env_list = format, lint, static +min_version = 4.0.0 + +[vars] +src_path = {tox_root}/src +;tests_path = {tox_root}/tests +;lib_path = {tox_root}/lib/charms/operator_name_with_underscores +all_path = {[vars]src_path} + +[testenv] +set_env = + PYTHONPATH = {tox_root}/lib:{[vars]src_path} + PYTHONBREAKPOINT=pdb.set_trace + PY_COLORS=1 +pass_env = + PYTHONPATH + CHARM_BUILD_DIR + MODEL_SETTINGS + +[testenv:format] +description = Apply coding style standards to code +deps = + ruff +commands = + ruff format {[vars]all_path} + ruff check --fix {[vars]all_path} + +[testenv:lint] +description = Check code against coding style standards +deps = + ruff + codespell +commands = + # if this charm owns a lib, uncomment "lib_path" variable + # and uncomment the following line + # codespell {[vars]lib_path} + codespell {tox_root} + ruff check {[vars]all_path} + ruff format --check --diff {[vars]all_path} + +[testenv:unit] +description = Run unit tests +deps = + pytest + coverage[toml] + -r {tox_root}/requirements.txt +commands = + coverage run --source={[vars]src_path} \ + -m pytest \ + --tb native \ + -v \ + -s \ + {posargs} \ + {[vars]tests_path}/unit + coverage report + +[testenv:static] +description = Run static type checks +deps = + pyright + -r {tox_root}/requirements.txt +commands = + pyright {posargs} + +[testenv:integration] +description = Run integration tests +deps = + pytest + juju + pytest-operator + -r {tox_root}/requirements.txt +commands = + pytest -v \ + -s \ + --tb native \ + --log-cli-level=INFO \ + {posargs} \ + {[vars]tests_path}/integration diff --git a/charmcraft/templates/init-kubernetes/pyproject.toml.j2 b/charmcraft/templates/init-kubernetes/pyproject.toml.j2 index ceeab132b..3cb1ce223 100644 --- a/charmcraft/templates/init-kubernetes/pyproject.toml.j2 +++ b/charmcraft/templates/init-kubernetes/pyproject.toml.j2 @@ -14,6 +14,8 @@ log_cli_level = "INFO" line-length = 99 lint.select = ["E", "W", "F", "C", "N", "D", "I001"] lint.extend-ignore = [ + "D105", + "D107", "D203", "D204", "D213", @@ -26,7 +28,6 @@ lint.extend-ignore = [ "D409", "D413", ] -lint.ignore = ["E501", "D107"] extend-exclude = ["__pycache__", "*.egg_info"] lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} diff --git a/charmcraft/templates/init-kubernetes/requirements.txt.j2 b/charmcraft/templates/init-kubernetes/requirements.txt.j2 index b00d7bc12..0356c38b5 100644 --- a/charmcraft/templates/init-kubernetes/requirements.txt.j2 +++ b/charmcraft/templates/init-kubernetes/requirements.txt.j2 @@ -1 +1 @@ -ops ~= 2.8 +ops ~= 2.17 diff --git a/charmcraft/templates/init-kubernetes/src/charm.py.j2 b/charmcraft/templates/init-kubernetes/src/charm.py.j2 index 7c2ba6412..2aa189958 100755 --- a/charmcraft/templates/init-kubernetes/src/charm.py.j2 +++ b/charmcraft/templates/init-kubernetes/src/charm.py.j2 @@ -24,4 +24,4 @@ class {{ class_name }}(ops.CharmBase): if __name__ == "__main__": # pragma: nocover - ops.main({{ class_name }}) # type: ignore + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-kubernetes/tests/unit/test_charm.py.j2 b/charmcraft/templates/init-kubernetes/tests/unit/test_charm.py.j2 index 6d43b2357..a67e465b3 100644 --- a/charmcraft/templates/init-kubernetes/tests/unit/test_charm.py.j2 +++ b/charmcraft/templates/init-kubernetes/tests/unit/test_charm.py.j2 @@ -3,22 +3,19 @@ # # Learn more about testing at: https://juju.is/docs/sdk/testing -import ops -import ops.testing -import pytest +from ops import 
testing + from charm import {{ class_name }} -@pytest.fixture -def harness(): - harness = ops.testing.Harness({{ class_name }}) - harness.begin() - yield harness - harness.cleanup() +def test_pebble_ready(): + # Arrange: + ctx = testing.Context({{ class_name }}) + container = testing.Container("some-container", can_connect=True) + state_in = testing.State(containers={container}) + # Act: + state_out = ctx.run(ctx.on.pebble_ready(container), state_in) -def test_pebble_ready(harness: ops.testing.Harness[{{ class_name }}]): - # Simulate the container coming up and emission of pebble-ready event - harness.container_pebble_ready("some-container") - # Ensure we set an ActiveStatus with no message - assert harness.model.unit.status == ops.ActiveStatus() + # Assert: + assert state_out.unit_status == testing.ActiveStatus() diff --git a/charmcraft/templates/init-kubernetes/tox.ini.j2 b/charmcraft/templates/init-kubernetes/tox.ini.j2 index 1b06be988..f30c25577 100644 --- a/charmcraft/templates/init-kubernetes/tox.ini.j2 +++ b/charmcraft/templates/init-kubernetes/tox.ini.j2 @@ -49,6 +49,7 @@ description = Run unit tests deps = pytest coverage[toml] + ops[testing] -r {tox_root}/requirements.txt commands = coverage run --source={[vars]src_path} \ @@ -64,6 +65,7 @@ commands = description = Run static type checks deps = pyright + ops[testing] -r {tox_root}/requirements.txt commands = pyright {posargs} diff --git a/charmcraft/templates/init-machine/pyproject.toml.j2 b/charmcraft/templates/init-machine/pyproject.toml.j2 index ceeab132b..3cb1ce223 100644 --- a/charmcraft/templates/init-machine/pyproject.toml.j2 +++ b/charmcraft/templates/init-machine/pyproject.toml.j2 @@ -14,6 +14,8 @@ log_cli_level = "INFO" line-length = 99 lint.select = ["E", "W", "F", "C", "N", "D", "I001"] lint.extend-ignore = [ + "D105", + "D107", "D203", "D204", "D213", @@ -26,7 +28,6 @@ lint.extend-ignore = [ "D409", "D413", ] -lint.ignore = ["E501", "D107"] extend-exclude = ["__pycache__", "*.egg_info"] lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} diff --git a/charmcraft/templates/init-machine/requirements.txt.j2 b/charmcraft/templates/init-machine/requirements.txt.j2 index b00d7bc12..0356c38b5 100644 --- a/charmcraft/templates/init-machine/requirements.txt.j2 +++ b/charmcraft/templates/init-machine/requirements.txt.j2 @@ -1 +1 @@ -ops ~= 2.8 +ops ~= 2.17 diff --git a/charmcraft/templates/init-machine/src/charm.py.j2 b/charmcraft/templates/init-machine/src/charm.py.j2 index c57008efd..3ba24f180 100644 --- a/charmcraft/templates/init-machine/src/charm.py.j2 +++ b/charmcraft/templates/init-machine/src/charm.py.j2 @@ -24,4 +24,4 @@ class {{ class_name }}(ops.CharmBase): if __name__ == "__main__": # pragma: nocover - ops.main({{ class_name }}) # type: ignore + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-machine/tests/unit/test_charm.py.j2 b/charmcraft/templates/init-machine/tests/unit/test_charm.py.j2 index 0120cfbab..bd7968cf6 100644 --- a/charmcraft/templates/init-machine/tests/unit/test_charm.py.j2 +++ b/charmcraft/templates/init-machine/tests/unit/test_charm.py.j2 @@ -5,19 +5,15 @@ import unittest -import ops -import ops.testing -from charm import {{ class_name }} - +from ops import testing -class TestCharm(unittest.TestCase): - def setUp(self): - self.harness = ops.testing.Harness({{ class_name }}) - self.addCleanup(self.harness.cleanup) +from charm import {{ class_name }} - def test_start(self): - # Simulate the charm starting - self.harness.begin_with_initial_hooks() - # Ensure 
we set an ActiveStatus with no message - self.assertEqual(self.harness.model.unit.status, ops.ActiveStatus()) +def test_start(): + # Arrange: + ctx = testing.Context({{ class_name }}) + # Act: + state_out = ctx.run(ctx.on.start(), testing.State()) + # Assert: + assert state_out.unit_status == testing.ActiveStatus() diff --git a/charmcraft/templates/init-machine/tox.ini.j2 b/charmcraft/templates/init-machine/tox.ini.j2 index 1b06be988..f30c25577 100644 --- a/charmcraft/templates/init-machine/tox.ini.j2 +++ b/charmcraft/templates/init-machine/tox.ini.j2 @@ -49,6 +49,7 @@ description = Run unit tests deps = pytest coverage[toml] + ops[testing] -r {tox_root}/requirements.txt commands = coverage run --source={[vars]src_path} \ @@ -64,6 +65,7 @@ commands = description = Run static type checks deps = pyright + ops[testing] -r {tox_root}/requirements.txt commands = pyright {posargs} diff --git a/charmcraft/templates/init-simple/pyproject.toml.j2 b/charmcraft/templates/init-simple/pyproject.toml.j2 index ceeab132b..3cb1ce223 100644 --- a/charmcraft/templates/init-simple/pyproject.toml.j2 +++ b/charmcraft/templates/init-simple/pyproject.toml.j2 @@ -14,6 +14,8 @@ log_cli_level = "INFO" line-length = 99 lint.select = ["E", "W", "F", "C", "N", "D", "I001"] lint.extend-ignore = [ + "D105", + "D107", "D203", "D204", "D213", @@ -26,7 +28,6 @@ lint.extend-ignore = [ "D409", "D413", ] -lint.ignore = ["E501", "D107"] extend-exclude = ["__pycache__", "*.egg_info"] lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} diff --git a/charmcraft/templates/init-simple/requirements.txt.j2 b/charmcraft/templates/init-simple/requirements.txt.j2 index b00d7bc12..0356c38b5 100644 --- a/charmcraft/templates/init-simple/requirements.txt.j2 +++ b/charmcraft/templates/init-simple/requirements.txt.j2 @@ -1 +1 @@ -ops ~= 2.8 +ops ~= 2.17 diff --git a/charmcraft/templates/init-simple/src/charm.py.j2 b/charmcraft/templates/init-simple/src/charm.py.j2 index e84df9ad6..d1b9e1bd1 100644 --- a/charmcraft/templates/init-simple/src/charm.py.j2 +++ b/charmcraft/templates/init-simple/src/charm.py.j2 @@ -64,21 +64,21 @@ class {{ class_name }}(ops.CharmBase): if log_level in VALID_LOG_LEVELS: # The config is good, so update the configuration of the workload container = self.unit.get_container("httpbin") - # Verify that we can connect to the Pebble API in the workload container - if container.can_connect(): - # Push an updated layer with the new config + # Push an updated layer with the new config + try: container.add_layer("httpbin", self._pebble_layer, combine=True) container.replan() - - logger.debug("Log level for gunicorn changed to '%s'", log_level) - self.unit.status = ops.ActiveStatus() - else: + except ops.pebble.ConnectionError: # We were unable to connect to the Pebble API, so we defer this event + self.unit.status = ops.MaintenanceStatus("waiting for Pebble API") event.defer() - self.unit.status = ops.WaitingStatus("waiting for Pebble API") + return + + logger.debug("Log level for gunicorn changed to '%s'", log_level) + self.unit.status = ops.ActiveStatus() else: # In this case, the config option is bad, so block the charm and notify the operator. 
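The hunk above switches the template from checking ``can_connect()`` to simply attempting the Pebble calls and handling ``ops.pebble.ConnectionError``. A minimal, self-contained sketch of that handler shape, with placeholder container and layer names rather than the template's real ones::

    import logging

    import ops

    logger = logging.getLogger(__name__)


    class MyCharm(ops.CharmBase):
        """Condensed sketch of the try/except pattern used by the updated template."""

        def __init__(self, framework: ops.Framework):
            super().__init__(framework)
            framework.observe(self.on.config_changed, self._on_config_changed)

        def _on_config_changed(self, event: ops.ConfigChangedEvent):
            container = self.unit.get_container("my-container")  # placeholder name
            try:
                # Attempt the Pebble calls directly instead of checking can_connect()
                # first, which avoids the time-of-check/time-of-use race.
                container.add_layer("my-layer", {"services": {}}, combine=True)
                container.replan()
            except ops.pebble.ConnectionError:
                # Pebble isn't up yet: record why, and retry this event later.
                self.unit.status = ops.MaintenanceStatus("waiting for Pebble API")
                event.defer()
                return
            self.unit.status = ops.ActiveStatus()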
- self.unit.status = ops.BlockedStatus("invalid log level: '{log_level}'") + self.unit.status = ops.BlockedStatus(f"invalid log level: '{log_level}'") @property def _pebble_layer(self) -> ops.pebble.LayerDict: @@ -101,4 +101,4 @@ class {{ class_name }}(ops.CharmBase): if __name__ == "__main__": # pragma: nocover - ops.main({{ class_name }}) # type: ignore + ops.main({{ class_name }}) diff --git a/charmcraft/templates/init-simple/tests/unit/test_charm.py.j2 b/charmcraft/templates/init-simple/tests/unit/test_charm.py.j2 index 67ac35c9d..88dd0dd00 100644 --- a/charmcraft/templates/init-simple/tests/unit/test_charm.py.j2 +++ b/charmcraft/templates/init-simple/tests/unit/test_charm.py.j2 @@ -4,21 +4,23 @@ # Learn more about testing at: https://juju.is/docs/sdk/testing import ops -import ops.testing -import pytest +import ops.pebble +from ops import testing + from charm import {{ class_name }} -@pytest.fixture -def harness(): - harness = ops.testing.Harness({{ class_name }}) - harness.begin() - yield harness - harness.cleanup() +def test_httpbin_pebble_ready(): + # Arrange: + ctx = testing.Context({{ class_name }}) + container = testing.Container("httpbin", can_connect=True) + state_in = testing.State(containers={container}) + # Act: + state_out = ctx.run(ctx.on.pebble_ready(container), state_in) -def test_httpbin_pebble_ready(harness: ops.testing.Harness[{{ class_name }}]): - # Expected plan after Pebble ready with default config + # Assert: + updated_plan = state_out.get_container(container.name).plan expected_plan = { "services": { "httpbin": { @@ -30,43 +32,63 @@ def test_httpbin_pebble_ready(harness: ops.testing.Harness[{{ class_name }}]): } }, } - # Simulate the container coming up and emission of pebble-ready event - harness.container_pebble_ready("httpbin") - # Get the plan now we've run PebbleReady - updated_plan = harness.get_container_pebble_plan("httpbin").to_dict() - # Check we've got the plan we expected assert expected_plan == updated_plan - # Check the service was started - service = harness.model.unit.get_container("httpbin").get_service("httpbin") - assert service.is_running() - # Ensure we set an ActiveStatus with no message - assert harness.model.unit.status == ops.ActiveStatus() - - -def test_config_changed_valid_can_connect(harness: ops.testing.Harness[{{ class_name }}]): - # Ensure the simulated Pebble API is reachable - harness.set_can_connect("httpbin", True) - # Trigger a config-changed event with an updated value - harness.update_config({"log-level": "debug"}) - # Get the plan now we've run PebbleReady - updated_plan = harness.get_container_pebble_plan("httpbin").to_dict() - updated_env = updated_plan["services"]["httpbin"]["environment"] - # Check the config change was effective - assert updated_env == {"GUNICORN_CMD_ARGS": "--log-level debug"} - assert harness.model.unit.status == ops.ActiveStatus() - - -def test_config_changed_valid_cannot_connect(harness: ops.testing.Harness[{{ class_name }}]): - # Trigger a config-changed event with an updated value - harness.update_config({"log-level": "debug"}) - # Check the charm is in WaitingStatus - assert isinstance(harness.model.unit.status, ops.WaitingStatus) - - -def test_config_changed_invalid(harness: ops.testing.Harness[{{ class_name }}]): - # Ensure the simulated Pebble API is reachable - harness.set_can_connect("httpbin", True) - # Trigger a config-changed event with an updated value - harness.update_config({"log-level": "foobar"}) - # Check the charm is in BlockedStatus - assert isinstance(harness.model.unit.status, 
ops.BlockedStatus) + assert ( + state_out.get_container(container.name).service_statuses["httpbin"] + == ops.pebble.ServiceStatus.ACTIVE + ) + assert state_out.unit_status == testing.ActiveStatus() + + +def test_config_changed_valid_can_connect(): + """Test a config-changed event when the config is valid and the container can be reached.""" + # Arrange: + ctx = testing.Context({{ class_name }}) # The default config will be read from charmcraft.yaml + container = testing.Container("httpbin", can_connect=True) + state_in = testing.State( + containers={container}, + config={"log-level": "debug"}, # This is the config the charmer passed with `juju config` + ) + + # Act: + state_out = ctx.run(ctx.on.config_changed(), state_in) + + # Assert: + updated_plan = state_out.get_container(container.name).plan + gunicorn_args = updated_plan.services["httpbin"].environment["GUNICORN_CMD_ARGS"] + assert gunicorn_args == "--log-level debug" + assert state_out.unit_status == testing.ActiveStatus() + + +def test_config_changed_valid_cannot_connect(): + """Test a config-changed event when the config is valid but the container cannot be reached. + + We expect to end up in MaintenanceStatus waiting for the deferred event to + be retried. + """ + # Arrange: + ctx = testing.Context({{ class_name }}) + container = testing.Container("httpbin", can_connect=False) + state_in = testing.State(containers={container}, config={"log-level": "debug"}) + + # Act: + state_out = ctx.run(ctx.on.config_changed(), state_in) + + # Assert: + assert isinstance(state_out.unit_status, testing.MaintenanceStatus) + + +def test_config_changed_invalid(): + """Test a config-changed event when the config is invalid.""" + # Arrange: + ctx = testing.Context({{ class_name }}) + container = testing.Container("httpbin", can_connect=True) + invalid_level = "foobar" + state_in = testing.State(containers={container}, config={"log-level": invalid_level}) + + # Act: + state_out = ctx.run(ctx.on.config_changed(), state_in) + + # Assert: + assert isinstance(state_out.unit_status, testing.BlockedStatus) + assert invalid_level in state_out.unit_status.message diff --git a/charmcraft/templates/init-simple/tox.ini.j2 b/charmcraft/templates/init-simple/tox.ini.j2 index 1b06be988..f30c25577 100644 --- a/charmcraft/templates/init-simple/tox.ini.j2 +++ b/charmcraft/templates/init-simple/tox.ini.j2 @@ -49,6 +49,7 @@ description = Run unit tests deps = pytest coverage[toml] + ops[testing] -r {tox_root}/requirements.txt commands = coverage run --source={[vars]src_path} \ @@ -64,6 +65,7 @@ commands = description = Run static type checks deps = pyright + ops[testing] -r {tox_root}/requirements.txt commands = pyright {posargs} diff --git a/charmcraft/utils/__init__.py b/charmcraft/utils/__init__.py index eac510698..a3995d685 100644 --- a/charmcraft/utils/__init__.py +++ b/charmcraft/utils/__init__.py @@ -19,11 +19,13 @@ from charmcraft.utils.charmlibs import ( LibData, LibInternals, - get_name_from_metadata, + QualifiedLibraryName, + get_name_from_yaml, create_charm_name_from_importable, create_importable_name, get_lib_internals, get_lib_path, + get_lib_charm_path, get_lib_module_name, get_lib_info, get_libs_from_tree, @@ -44,7 +46,13 @@ get_os_platform, validate_architectures, ) -from charmcraft.utils.file import S_IRALL, S_IXALL, make_executable, useful_filepath, build_zip +from charmcraft.utils.file import ( + S_IRALL, + S_IXALL, + make_executable, + useful_filepath, + build_zip, +) from charmcraft.utils.package import ( get_pypi_packages, PACKAGE_LINE_REGEX, @@ 
-55,6 +63,11 @@ get_requirements_file_package_names, validate_strict_dependencies, ) +from charmcraft.utils.parts import ( + extend_python_build_environment, + get_charm_copy_commands, + get_venv_cleanup_commands, +) from charmcraft.utils.project import ( find_charm_sources, get_charm_name_from_path, @@ -67,11 +80,13 @@ __all__ = [ "LibData", "LibInternals", - "get_name_from_metadata", + "QualifiedLibraryName", + "get_name_from_yaml", "create_charm_name_from_importable", "create_importable_name", "get_lib_internals", "get_lib_path", + "get_lib_charm_path", "get_lib_module_name", "get_lib_info", "get_libs_from_tree", @@ -100,6 +115,9 @@ "confirm_with_user", "format_content", "humanize_list", + "extend_python_build_environment", + "get_charm_copy_commands", + "get_venv_cleanup_commands", "find_charm_sources", "get_charm_name_from_path", "get_templates_environment", diff --git a/charmcraft/utils/charmlibs.py b/charmcraft/utils/charmlibs.py index 5d044e1a7..c66c45c38 100644 --- a/charmcraft/utils/charmlibs.py +++ b/charmcraft/utils/charmlibs.py @@ -23,10 +23,11 @@ from dataclasses import dataclass from typing import overload -import yaml from craft_cli import CraftError +from typing_extensions import Self from charmcraft import const, errors +from charmcraft.utils.yaml import load_yaml @dataclass(frozen=True) @@ -56,15 +57,34 @@ class LibInternals: content: str -def get_name_from_metadata() -> str | None: +@dataclass +class QualifiedLibraryName: + """The parts of a library's name.""" + + charm_name: str + lib_name: str + + @classmethod + def from_string(cls, value: str) -> Self: + """Convert a string of . to a LibraryName.""" + charm_name, _, lib_name = value.partition(".") + if not charm_name or not lib_name or "." in lib_name: + raise ValueError(f"Not a valid library name: {value!r}") + return cls(create_importable_name(charm_name), lib_name) + + def __str__(self) -> str: + return f"{create_charm_name_from_importable(self.charm_name)}.{self.lib_name}" + + +def get_name_from_yaml() -> str | None: """Return the name if present and plausible in metadata.yaml.""" - try: - with open(const.METADATA_FILENAME, "rb") as fh: - metadata = yaml.safe_load(fh) - charm_name = metadata["name"] - except (yaml.error.YAMLError, OSError, KeyError): - return None - return charm_name + charmcraft_yaml = load_yaml(pathlib.Path(const.CHARMCRAFT_FILENAME)) + if charmcraft_yaml and "name" in charmcraft_yaml: + return charmcraft_yaml.get("name") + metadata_yaml = load_yaml(pathlib.Path(const.METADATA_FILENAME)) + if metadata_yaml: + return metadata_yaml.get("name") + return None def create_importable_name(charm_name: str) -> str: @@ -97,15 +117,18 @@ def _api_patch_validator(value): simple_fields = { "LIBAPI": ( _api_patch_validator, - _msg_prefix + "LIBAPI must be a constant assignment of zero or a positive integer.", + _msg_prefix + + "LIBAPI must be a constant assignment of zero or a positive integer.", ), "LIBPATCH": ( _api_patch_validator, - _msg_prefix + "LIBPATCH must be a constant assignment of zero or a positive integer.", + _msg_prefix + + "LIBPATCH must be a constant assignment of zero or a positive integer.", ), "LIBID": ( lambda value: isinstance(value, str) and value and value.isascii(), - _msg_prefix + "LIBID must be a constant assignment of a non-empty ASCII string.", + _msg_prefix + + "LIBID must be a constant assignment of a non-empty ASCII string.", ), } pydeps_error = _msg_prefix + "PYDEPS must be a constant list of non-empty strings" @@ -177,9 +200,16 @@ def get_lib_path(charm: str, lib_name: str, 
api: int) -> pathlib.Path: :param api: The API version of the library :returns: A relative path to the library python file. """ - return ( - pathlib.Path("lib/charms") / create_importable_name(charm) / f"v{api}" / f"{lib_name}.py" - ) + return get_lib_charm_path(charm) / f"v{api}" / f"{lib_name}.py" + + +def get_lib_charm_path(charm: str) -> pathlib.Path: + """Get a relative path where the libraries for a charm would be stored. + + :param charm: the name of the charm + :returns: A relative path to the charm's libraries directory. + """ + return pathlib.Path("lib/charms") / create_importable_name(charm) def get_lib_module_name(charm: str, lib_name: str, api: int) -> str: @@ -197,7 +227,9 @@ def get_lib_module_name(charm: str, lib_name: str, api: int) -> str: def get_lib_info(*, full_name: str) -> LibData: ... @overload def get_lib_info(*, lib_path: pathlib.Path) -> LibData: ... -def get_lib_info(*, full_name: str | None = None, lib_path: pathlib.Path | None = None) -> LibData: +def get_lib_info( + *, full_name: str | None = None, lib_path: pathlib.Path | None = None +) -> LibData: """Get the whole lib info from the path/file. This will perform mutation of the charm name to create importable paths. @@ -210,7 +242,7 @@ def get_lib_info(*, full_name: str | None = None, lib_path: pathlib.Path | None if lib_path: # get it from the lib_path try: - libsdir, charmsdir, importable_charm_name, v_api = lib_path.parts[:-1] + libsdir, charmsdir, importable_charm_name, v_api = lib_path.parts[-5:-1] except ValueError: raise errors.BadLibraryPathError(lib_path) if libsdir != "lib" or charmsdir != "charms" or lib_path.suffix != ".py": @@ -240,7 +272,9 @@ def get_lib_info(*, full_name: str | None = None, lib_path: pathlib.Path | None charm_name = create_charm_name_from_importable(importable_charm_name) if v_api[0] != "v" or not v_api[1:].isdigit(): - raise CraftError("The API version in the library path must be 'vN' where N is an integer.") + raise CraftError( + "The API version in the library path must be 'vN' where N is an integer." + ) api_from_path = int(v_api[1:]) lib_name = lib_path.stem diff --git a/charmcraft/utils/cli.py b/charmcraft/utils/cli.py index 1f1044ec3..5165a4e73 100644 --- a/charmcraft/utils/cli.py +++ b/charmcraft/utils/cli.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """CLI-related utilities for Charmcraft.""" + import datetime import enum import json @@ -80,9 +81,7 @@ def __call__(self, value): else: if revision >= 0: return ResourceOption(name, revision) - msg = ( - "the resource format must be : (revision being a non-negative integer)" - ) + msg = "the resource format must be : (revision being a non-negative integer)" raise ValueError(msg) @@ -182,7 +181,9 @@ class OutputFormat(enum.Enum): @overload -def format_content(content: dict[str, str], fmt: Literal[OutputFormat.TABLE, "table"]) -> str: ... +def format_content( + content: dict[str, str], fmt: Literal[OutputFormat.TABLE, "table"] +) -> str: ... 
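Taken together, the renamed and newly exported charm-library helpers above compose roughly as follows (a sketch only; the library name is illustrative)::

    from charmcraft import utils

    # Parse a fully qualified library name of the form "<charm>.<lib>".
    lib = utils.QualifiedLibraryName.from_string("observability-libs.cert_handler")
    assert lib.charm_name == "observability_libs"  # stored in importable form
    assert lib.lib_name == "cert_handler"
    assert str(lib) == "observability-libs.cert_handler"  # round-trips to the store form

    # The path helpers build the on-disk location for a given API version.
    charm_dir = utils.get_lib_charm_path("observability-libs")
    lib_file = utils.get_lib_path("observability-libs", "cert_handler", api=1)
    assert charm_dir.as_posix() == "lib/charms/observability_libs"
    assert lib_file.as_posix() == "lib/charms/observability_libs/v1/cert_handler.py"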
@overload diff --git a/charmcraft/utils/file.py b/charmcraft/utils/file.py index a88c73936..e517012ec 100644 --- a/charmcraft/utils/file.py +++ b/charmcraft/utils/file.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """File-related utilities.""" + import io import os import pathlib diff --git a/charmcraft/utils/package.py b/charmcraft/utils/package.py index 80d231606..e90946c62 100644 --- a/charmcraft/utils/package.py +++ b/charmcraft/utils/package.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Utilities related to Python packages.""" + import pathlib import re import string @@ -121,7 +122,11 @@ def get_pip_command( source_only_packages = sorted( get_package_names(all_packages) - get_package_names(binary_packages) ) - no_binary = [f"--no-binary={','.join(source_only_packages)}"] if source_only_packages else () + no_binary = ( + [f"--no-binary={','.join(source_only_packages)}"] + if source_only_packages + else () + ) return [ *prefix, @@ -133,7 +138,9 @@ def get_pip_command( def get_pip_version(pip_cmd: str) -> tuple[int, ...]: """Get the version of pip available from a specific pip command.""" - result = subprocess.run([pip_cmd, "--version"], text=True, capture_output=True, check=True) + result = subprocess.run( + [pip_cmd, "--version"], text=True, capture_output=True, check=True + ) version_data = result.stdout.split(" ") if len(version_data) < 2: raise ValueError("Unknown pip version") diff --git a/charmcraft/utils/parts.py b/charmcraft/utils/parts.py new file mode 100644 index 000000000..2c994621a --- /dev/null +++ b/charmcraft/utils/parts.py @@ -0,0 +1,76 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft +"""Utility functions for craft-parts plugins.""" + +import pathlib +import shlex +import textwrap +from collections.abc import Collection + + +def extend_python_build_environment(environment: dict[str, str]) -> dict[str, str]: + """Extend the build environment for all Python plugins. + + :param environment: the existing environment dictionary + :returns: the environment dictionary with charmcraft-specific additions. + """ + return environment | { + "PIP_NO_BINARY": ":all:", # Build from source + "PARTS_PYTHON_VENV_ARGS": "--without-pip", + } + + +def get_charm_copy_commands( + build_dir: pathlib.Path, install_dir: pathlib.Path +) -> Collection[str]: + """Get the commands to copy charm source and charmlibs into the install directory. + + The commands will only be included if the relevant directories exist. 
+ """ + copy_command_base = ["cp", "--archive", "--recursive", "--reflink=auto"] + src_dir = build_dir / "src" + libs_dir = build_dir / "lib" + + commands = [] + if src_dir.exists(): + commands.append( + shlex.join([*copy_command_base, str(src_dir), str(install_dir)]) + ) + if libs_dir.exists(): + commands.append( + shlex.join([*copy_command_base, str(libs_dir), str(install_dir)]) + ) + + return commands + + +def get_venv_cleanup_commands(venv_path: pathlib.Path, *, keep_bins: bool) -> list[str]: + """Get a script do Charmcraft-specific venv cleanup. + + :param venv_path: The path to the venv. + :param keep_bins: Whether to keep the bin directory of the venv. + :returns: A shell script to do this, as a string. + """ + venv_bin = venv_path / "bin" + venv_lib64 = venv_path / "lib64" + delete_bins = [] if keep_bins else [f"rm -rf {venv_bin}"] + delete_lib64 = textwrap.dedent(f""" + if [ -L '{venv_lib64}' ]; then + rm -f '{venv_lib64}' + fi + """) + + return [*delete_bins, delete_lib64] diff --git a/charmcraft/utils/platform.py b/charmcraft/utils/platform.py index b5cd6da33..ab0836438 100644 --- a/charmcraft/utils/platform.py +++ b/charmcraft/utils/platform.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Platform-related Charmcraft utilities.""" + import dataclasses import pathlib import platform @@ -35,7 +36,9 @@ class OSPlatform: machine: str -def get_os_platform(filepath: pathlib.Path = pathlib.Path("/etc/os-release")) -> OSPlatform: +def get_os_platform( + filepath: pathlib.Path = pathlib.Path("/etc/os-release"), +) -> OSPlatform: """Determine a system/release combo for an OS using /etc/os-release if available.""" system = platform.system() release = platform.release() @@ -52,7 +55,9 @@ def get_os_platform(filepath: pathlib.Path = pathlib.Path("/etc/os-release")) -> return OSPlatform(system=system, release=release, machine=machine) -def validate_architectures(architectures: Iterable[str], *, allow_all: bool = False) -> None: +def validate_architectures( + architectures: Iterable[str], *, allow_all: bool = False +) -> None: """Validate that all architectures provided are valid architecture names.""" architectures = set(architectures) if allow_all and "all" in architectures: diff --git a/charmcraft/utils/project.py b/charmcraft/utils/project.py index 4c39591ec..68292b65d 100644 --- a/charmcraft/utils/project.py +++ b/charmcraft/utils/project.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Charm project related utilities.""" + import itertools import os import pathlib @@ -50,13 +51,17 @@ def find_charm_sources( lambda p: (p / const.CHARMCRAFT_FILENAME).exists(), outer_potential_paths ) for path in potential_paths: - if path in charm_paths.values(): # Symlinks can cause ignorable duplicate paths. + if ( + path in charm_paths.values() + ): # Symlinks can cause ignorable duplicate paths. 
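Stepping back to the new ``charmcraft/utils/parts.py`` helpers above, a rough sketch of how a Python-based plugin might combine them (the paths here are hypothetical)::

    import pathlib

    from charmcraft import utils

    build_dir = pathlib.Path("/root/parts/my-charm/build")      # hypothetical
    install_dir = pathlib.Path("/root/parts/my-charm/install")  # hypothetical

    # Force source builds and a pip-less venv on top of the plugin's own environment.
    env = utils.extend_python_build_environment({})
    # env now contains PIP_NO_BINARY=":all:" and PARTS_PYTHON_VENV_ARGS="--without-pip"

    # Shell snippets to append to the plugin's build commands: copy src/ and lib/
    # into the install directory (when present), then trim the venv.
    commands = [
        *utils.get_charm_copy_commands(build_dir, install_dir),
        *utils.get_venv_cleanup_commands(install_dir / "venv", keep_bins=False),
    ]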
continue try: charm_name = get_charm_name_from_path(path) except InvalidCharmPathError: continue - if charm_name not in charm_names: # We only care if the charm is listed for finding + if ( + charm_name not in charm_names + ): # We only care if the charm is listed for finding continue if charm_name != path.name: emit.verbose(f"Charm {charm_name!r} found in non-matching path {path}") diff --git a/charmcraft/utils/skopeo.py b/charmcraft/utils/skopeo.py index 3bdaebb06..bb0d15c68 100644 --- a/charmcraft/utils/skopeo.py +++ b/charmcraft/utils/skopeo.py @@ -70,7 +70,9 @@ def get_global_command(self) -> list[str]: command.append("--debug") return command - def _run_skopeo(self, command: Sequence[str], **kwargs) -> subprocess.CompletedProcess: + def _run_skopeo( + self, command: Sequence[str], **kwargs + ) -> subprocess.CompletedProcess: """Run skopeo, converting the error message if necessary.""" try: return subprocess.run(command, check=True, **kwargs) @@ -121,7 +123,12 @@ def copy( @overload def inspect( - self, image: str, *, format_template: None = None, raw: bool = False, tags: bool = True + self, + image: str, + *, + format_template: None = None, + raw: bool = False, + tags: bool = True, ) -> dict[str, Any]: ... @overload def inspect( diff --git a/charmcraft/utils/store.py b/charmcraft/utils/store.py index d1c69d7cb..bbe286a1f 100644 --- a/charmcraft/utils/store.py +++ b/charmcraft/utils/store.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Store helper utilities.""" + from collections.abc import Iterable from craft_store import endpoints @@ -24,6 +25,12 @@ def get_packages( ) -> list[endpoints.Package]: """Get a list of packages from charms and bundles.""" return [ - *(endpoints.Package(package_type="charm", package_name=charm) for charm in charms), - *(endpoints.Package(package_type="bundle", package_name=bundle) for bundle in bundles), + *( + endpoints.Package(package_type="charm", package_name=charm) + for charm in charms + ), + *( + endpoints.Package(package_type="bundle", package_name=bundle) + for bundle in bundles + ), ] diff --git a/charmcraft/utils/yaml.py b/charmcraft/utils/yaml.py index 13ac9b255..c976bd67f 100644 --- a/charmcraft/utils/yaml.py +++ b/charmcraft/utils/yaml.py @@ -49,7 +49,9 @@ def dump_yaml(data: Any) -> str: # noqa: ANN401: yaml.dump takes anything, so w """Dump a craft model to a YAML string.""" yaml.add_representer(str, _repr_str, Dumper=yaml.SafeDumper) yaml.add_representer( - pydantic.AnyHttpUrl, _repr_str, Dumper=yaml.SafeDumper # type: ignore[arg-type] + pydantic.AnyHttpUrl, + _repr_str, # type: ignore[arg-type] + Dumper=yaml.SafeDumper, ) yaml.add_representer( const.CharmArch, diff --git a/docs/conf.py b/docs/conf.py index 2cf633708..3349ec8e0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -58,6 +58,7 @@ extensions.extend( [ + "sphinx.ext.ifconfig", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", "sphinx.ext.coverage", @@ -66,15 +67,59 @@ "sphinx_toolbox", "sphinx_toolbox.more_autodoc", "sphinx.ext.autodoc", # Must be loaded after more_autodoc - "sphinx_autodoc_typehints", # must be loaded after napoleon "sphinxcontrib.details.directive", "sphinx_toolbox.collapse", "sphinxcontrib.autodoc_pydantic", + "sphinxcontrib.details.directive", + "sphinx.ext.napoleon", + "sphinx_autodoc_typehints", # must be loaded after napoleon ] ) # endregion +exclude_patterns = [ + "_build", + "Thumbs.db", + ".DS_Store", + "env", + "sphinx-starter-pack", + # Excluded here because they are either included explicitly in other + # 
documents (so they generate "duplicate label" errors) or they aren't + # used in this documentation at all (so they generate "unreferenced" + # errors). + "common/craft-parts/explanation/lifecycle.rst", + "common/craft-parts/explanation/overlay_parameters.rst", + "common/craft-parts/explanation/overlays.rst", + "common/craft-parts/explanation/parts.rst", + "common/craft-parts/explanation/how_parts_are_built.rst", + "common/craft-parts/explanation/overlay_step.rst", + "common/craft-parts/how-to/craftctl.rst", + "common/craft-parts/how-to/include_files.rst", + "common/craft-parts/how-to/override_build.rst", + "common/craft-parts/reference/partition_specific_output_directory_variables.rst", + "common/craft-parts/reference/step_output_directories.rst", + "common/craft-parts/reference/plugins/ant_plugin.rst", + "common/craft-parts/reference/plugins/autotools_plugin.rst", + "common/craft-parts/reference/plugins/cmake_plugin.rst", + "common/craft-parts/reference/plugins/dotnet_plugin.rst", + "common/craft-parts/reference/plugins/go_plugin.rst", + "common/craft-parts/reference/plugins/make_plugin.rst", + "common/craft-parts/reference/plugins/maven_plugin.rst", + "common/craft-parts/reference/plugins/meson_plugin.rst", + "common/craft-parts/reference/plugins/npm_plugin.rst", + "common/craft-parts/reference/plugins/poetry_plugin.rst", + "common/craft-parts/reference/plugins/python_plugin.rst", + "common/craft-parts/reference/plugins/qmake_plugin.rst", + "common/craft-parts/reference/plugins/rust_plugin.rst", + "common/craft-parts/reference/plugins/scons_plugin.rst", + # Extra non-craft-parts exclusions can be added after this comment +] + +rst_epilog = """ +.. include:: /reuse/links.txt +""" + autodoc_default_options = {"exclude-members": "model_post_init"} # region Options for extensions @@ -84,6 +129,7 @@ intersphinx_mapping = { "python": ("https://docs.python.org/3", None), "craft-parts": ("https://canonical-craft-parts.readthedocs-hosted.com/en/latest/", None), + "rockcraft": ("https://documentation.ubuntu.com/rockcraft/en/stable/", None), } # See also: # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_disabled_reftypes @@ -119,3 +165,10 @@ def generate_cli_docs(nil): def setup(app): app.connect("builder-inited", generate_cli_docs) + + +# Setup libraries documentation snippets for use in charmcraft docs. +common_docs_path = pathlib.Path(__file__).parent / "common" +craft_parts_docs_path = pathlib.Path(craft_parts_docs.__file__).parent / "craft-parts" +(common_docs_path / "craft-parts").unlink(missing_ok=True) +(common_docs_path / "craft-parts").symlink_to(craft_parts_docs_path, target_is_directory=True) diff --git a/docs/explanation/index.rst b/docs/explanation/index.rst index eae2ed8e2..d590cad37 100644 --- a/docs/explanation/index.rst +++ b/docs/explanation/index.rst @@ -9,6 +9,10 @@ explanation is hosted on the `Charm SDK docs `_ ======================================================== diff --git a/docs/explanation/lifecycle.rst b/docs/explanation/lifecycle.rst new file mode 100644 index 000000000..eb45430ef --- /dev/null +++ b/docs/explanation/lifecycle.rst @@ -0,0 +1,45 @@ +.. _lifecycle: + +***************** +Lifecycle details +***************** + +Each part is built in :ref:`four separate steps `, each with +its own input and output locations: + +#. ``PULL`` — The source and external dependencies (such as package + dependencies) for the part are retrieved from their stated location and + placed into a package cache area. +#. 
``BUILD`` — The part is built according to the particular part plugin and + build override. +#. ``STAGE`` — The specified outputs from the ``BUILD`` step are copied into + a unified staging area for all parts. +#. ``PRIME`` — The specified files are copied from the staging area to the + priming area for use in the final payload. This is distinct from ``STAGE`` + in that the ``STAGE`` step allows files that are used in the ``BUILD`` steps + of dependent parts to be accessed, while the ``PRIME`` step occurs after all + parts have been staged. + +.. note:: + While craft-parts offers an ``OVERLAY`` step as well, charmcraft does not use it. + This is a distinction between how Charmcraft and `Rockcraft`_ work. + +Step order +---------- + +While each part's steps are guaranteed to run in the order above, they are +not necessarily run immediately following each other, especially if multiple +parts are included in a project. While specifics are implementation-dependent, +the general rules for combining parts are: + +#. ``PULL`` all parts before running further steps. +#. ``BUILD`` any unbuilt parts whose dependencies have been staged. If a part + has no dependencies, this part is built in the first iteration. +#. ``STAGE`` any newly-built parts. +#. Repeat the ``BUILD`` and ``STAGE`` steps until all parts have been staged. +#. ``PRIME`` all parts. + +Further Information +------------------- + +Further information can be found in the `Craft-parts`_ documentation. diff --git a/docs/howto/charm-to-poetry.rst b/docs/howto/charm-to-poetry.rst new file mode 100644 index 000000000..5b2d3655d --- /dev/null +++ b/docs/howto/charm-to-poetry.rst @@ -0,0 +1,108 @@ +.. _howto-migrate-to-poetry: + +Migrate from the Charm plugin to the Poetry plugin +================================================== + +Many charms use `Poetry`_ to manage their Python projects. For these charms, Charmcraft +has a :ref:`craft_parts_poetry_plugin`. Migrating from the Charm plugin provides some +benefits, such as no longer having to maintain a ``requirements.txt`` file. If the +charm to be migrated does not currently use poetry, refer to the +`Poetry documentation `_ for instructions +on how to use poetry for a Python project. + +Update ``charmcraft.yaml`` +-------------------------- + +The first step is to update ``charmcraft.yaml`` to include the correct parts definition. +Depending on the history of a specific charm, it may not have an explicitly-included +``parts`` section determining how to build the charm. In this case, a ``parts`` section +can be created as follows: + +.. code-block:: yaml + + parts: + my-charm: # This can be named anything you want + plugin: poetry + source: . + +Select compatible versions of ``pip`` and ``poetry`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Poetry plugin requires at least `pip 22.3 +`_, released in October 2022. If the +charm's base uses an older version of pip, a newer version can be installed in the +build environment using a dependency part. Likewise, a charm may require a newer +version of Poetry than is available in the distribution's repositories. The following +``parts`` section can be used in place of the section above to upgrade pip and Poetry +for charms that build on Ubuntu 22.04 or earlier: + +.. 
code-block:: yaml + :emphasize-lines: 2-9,11 + + parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + /usr/bin/python3 -m pip install pip==24.2 + curl -sSL https://install.python-poetry.org | python3 - + ln -sf $HOME/.local/bin/poetry /usr/local/bin/poetry + my-charm: # This can be named anything you want + after: [poetry-deps] + plugin: poetry + source: . + +Add optional dependency groups +------------------------------ + +If the charm has optional `dependency groups`_ that should be included when creating +the virtual environment, the ``poetry-with`` key can be used to include those groups +when creating the virtual environment. + +.. note:: + This is useful and encouraged, though not mandatory, for keeping track of + library dependencies, as covered in the next section. For an example, see + `postgresql-operator`_. + +Include charm library dependencies +---------------------------------- + +Unlike the Charm plugin, the Poetry plugin does not install the dependencies for +included charmlibs. If any of the charm libraries used have PYDEPS, these will +need to be added to the charm's dependencies, potentially as their own +`dependency group `_. + +To find these dependencies, check each library file for its ``PYDEPS``. A command +that can find these is:: + + find lib -name "*.py" -exec awk '/PYDEPS = \[/,/\]/' {} + + +If run from the base directory of a charm, this will show all the PYDEPS declarations +from all loaded charm libs. + +Include extra files +------------------- + +A Poetry plugin only includes the contents of the ``src`` and ``lib`` directories +as well as the generated virtual environment. If other files were previously included +from the main directory, they can be included again using the +:ref:`craft_parts_dump_plugin`: + +.. code-block:: yaml + :emphasize-lines: 5-9 + + parts: + my-charm: # This can be named anything you want + plugin: poetry + source: . + version-file: + plugin: dump + source: . + stage: + - charm_version + + +.. _dependency groups: https://python-poetry.org/docs/managing-dependencies/#dependency-groups +.. _Poetry: https://python-poetry.org +.. _postgresql-operator: https://github.com/canonical/postgresql-operator/blob/3c7c783d61d4bee4ce64c190a9f7d4a78048e4e7/pyproject.toml#L22-L35 diff --git a/docs/howto/charm-to-python.rst b/docs/howto/charm-to-python.rst new file mode 100644 index 000000000..cf530aa71 --- /dev/null +++ b/docs/howto/charm-to-python.rst @@ -0,0 +1,129 @@ +.. _howto-migrate-to-python: + +Migrate from the Charm plugin to the Python plugin +================================================== + +The Python plugin in Charmcraft offers a faster, stricter means of packing an operator +charm with a virtual environment. This guide shows how to migrate from a charm using +the default Charm plugin to using the Python plugin. + +Update ``charmcraft.yaml`` +-------------------------- + +The first step is to update ``charmcraft.yaml`` to include the correct parts definition. +Depending on the history of a specific charm, it may not have an explicitly-included +``parts`` section determining how to build the charm. In this case, a ``parts`` section +can be created as follows: + +.. code-block:: yaml + + parts: + my-charm: # This can be named anything you want + plugin: python + source: . + python-requirements: + - requirements.txt # Or whatever your requirements file is called. 
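Both migration guides ask you to copy each charm library's ``PYDEPS`` into the charm's own dependencies. For reference, such a declaration looks like this inside a library module (every value below is a placeholder)::

    # lib/charms/some_charm/v0/some_lib.py -- illustrative only
    """An example charm library header showing a PYDEPS declaration."""

    LIBID = "0123456789abcdef0123456789abcdef"  # placeholder ID
    LIBAPI = 0
    LIBPATCH = 3

    # Python packages this library needs at runtime; the `find ... awk` command in
    # these guides collects exactly these lists.
    PYDEPS = ["cryptography", "jsonschema"]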
+ +Select a compatible version of ``pip`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Python plugin requires at least `pip 22.3`_, released in October 2022. If the +charm's base uses an older version of pip, a newer version can be installed in the +build environment using a dependency part. The following ``parts`` section can be +used in place of the section above to upgrade pip for charms that build on Ubuntu +22.04 or earlier: + +.. code-block:: yaml + :emphasize-lines: 2-5,7 + + parts: + python-deps: + plugin: nil + override-build: | + /usr/bin/python3 -m pip install pip==24.2 + my-charm: # This can be named anything you want + after: [python-deps] + plugin: python + source: . + python-requirements: + - requirements.txt # Or whatever your requirements file is called. + +Flatten ``requirements.txt`` +---------------------------- + +One difference between the Python plugin and the Charm plugin is that the Python +plugin does not install dependencies, so the ``requirements.txt`` file must be a +complete set of packages needed in the charm's virtual environment. + +.. note:: + There are several tools for creating an exhaustive ``requirements.txt`` file. + Charmcraft works with any as long as it generates a requirements file that ``pip`` + understands. Because different versions of packages may have different + dependencies, it is recommended that the requirements file be generated using a + tool that will lock the dependencies to specific versions. + A few examples include: + + - `uv export `_ + - `pip-compile `_ + - `pip freeze `_ + +A basic ``requirements.txt`` file for a charm with no dependencies other than the +Operator framework may look something like:: + + ops==2.17.0 + pyyaml==6.0.2 + websocket-client==1.8.0 + +To check that the virtual environment for the charm would be valid, activate an +empty virtual environment and then run:: + + pip install --no-deps -r requirements.txt + pip check + +Include charm library dependencies +---------------------------------- + +Unlike the Charm plugin, the Python plugin does not install the dependencies +for included charmlibs. If any of the charm libraries used have PYDEPS, these will +need to be added to a requirements file as well. + +.. note:: + All requirements files are included in the same ``pip`` command to prevent + conflicting requirements from overriding each other. However, this means + that a charm will fail to build if it has conflicting requirements. A single + ``requirements.txt`` file, while not mandatory, is recommended. + +To find these dependencies, check each library file for its ``PYDEPS``. A command +that can find these is:: + + find lib -name "*.py" -exec awk '/PYDEPS = \[/,/\]/' {} + + +If run from the base directory of a charm, this will show all the PYDEPS declarations +from all loaded charm libs, which can be used to help generate the input for a tool +that generates ``requirements.txt``. + +Include extra files +------------------- + +The Python plugin only includes the contents of the ``src`` and ``lib`` directories +as well as the generated virtual environment. If other files were previously included +from the main directory, they can be included again using the +:ref:`craft_parts_dump_plugin`: + +.. code-block:: yaml + :emphasize-lines: 7-11 + + parts: + my-charm: # This can be named anything you want + plugin: python + source: . + python-requirements: + - requirements.txt # Or whatever your requirements file is called. + version-file: + plugin: dump + source: . + stage: + - charm_version + + +.. 
_pip 22.3: https://pip.pypa.io/en/stable/news/#v22-3
diff --git a/docs/howto/index.rst b/docs/howto/index.rst
new file mode 100644
index 000000000..9bf000898
--- /dev/null
+++ b/docs/howto/index.rst
@@ -0,0 +1,11 @@
+.. _howto:
+
+How-To
+******
+
+.. toctree::
+   :maxdepth: 2
+
+   charm-to-poetry
+   charm-to-python
+   shared-cache
diff --git a/docs/howto/shared-cache.rst b/docs/howto/shared-cache.rst
new file mode 100644
index 000000000..9fe793c68
--- /dev/null
+++ b/docs/howto/shared-cache.rst
@@ -0,0 +1,78 @@
+.. _howto-shared-cache:
+
+Cache intermediate build artefacts
+==================================
+
+Because Charmcraft builds Python packages from source rather than using pre-built
+wheels, the initial builds of charms can take a while. The intermediate artefacts
+get cached, which significantly speeds up subsequent builds.
+
+When installed as a snap, Charmcraft automatically caches these wheels in the
+``~/snap/charmcraft/common/cache`` directory. However, in some cases, it may be
+beneficial to change this directory.
+
+This can be especially useful in CI, where you may wish to specify a directory that
+gets cached between CI runs.
+
+Local usage
+-----------
+
+When packing locally, you can change where Charmcraft caches build artefacts by setting
+the ``CRAFT_SHARED_CACHE`` environment variable to the path of an existing directory to
+use instead::
+
+    mkdir -p /tmp/charmcraft
+    CRAFT_SHARED_CACHE=/tmp/charmcraft charmcraft pack
+
+On GitHub
+---------
+
+While it's recommended that you use the ``charmcraft/pack`` action from
+`craft-actions`_ where possible, the following workflow will manually pack a charm,
+caching the intermediate files:
+
+.. code-block:: yaml
+
+    name: Pack charm
+    on:
+      pull_request:
+    jobs:
+      pack:
+        runs-on: ubuntu-latest
+        steps:
+          - uses: actions/checkout@v4
+          - uses: canonical/craft-actions/charmcraft/setup@main
+          - uses: actions/cache@v4
+            with:
+              path: ${{ runner.temp }}
+              key: charmcraft-cache-${{ hashFiles('requirements.txt') }}
+              restore-keys: |
+                charmcraft-cache-
+          - env:
+              CRAFT_SHARED_CACHE: ${{ runner.temp }}
+            run: |
+              charmcraft pack
+
+On GitLab
+---------
+
+The following example ``.gitlab-ci.yml`` will install and run Charmcraft to pack your
+charm, caching the intermediate artefacts:
+
+.. code-block:: yaml
+
+    pack-charm:
+      cache:
+        - key:
+            files:
+              - requirements.txt
+          paths:
+            - .charmcraft_cache/
+      variables:
+        CRAFT_SHARED_CACHE: .charmcraft_cache/
+      script:
+        - mkdir -p .charmcraft_cache
+        - snap install charmcraft --classic
+        - charmcraft pack
+
+.. _craft-actions: https://github.com/canonical/craft-actions
diff --git a/docs/index.rst b/docs/index.rst
index b0bd33862..4e9931d9f 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -10,9 +10,20 @@ Most of Charmcraft's documentation is available there.
    :maxdepth: 1
    :hidden:
 
+   howto/index
    reference/index
    explanation/index
 
+.. grid:: 1 1 2 2
+
+   .. grid-item-card:: `Tutorial `_
+
+      **Get started** with a hands-on introduction to Charmcraft
+
+   .. grid-item-card:: :ref:`How-to guides `
+
+      **Step-by-step guides** covering key operations and common tasks
+
 .. grid:: 1 1 2 2
    :reverse:
 
diff --git a/docs/reference/changelog.rst b/docs/reference/changelog.rst
index 62cff97d1..12cac5a82 100644
--- a/docs/reference/changelog.rst
+++ b/docs/reference/changelog.rst
@@ -77,6 +77,120 @@ Changelog
 
 For a complete list of commits, see the `X.Y.Z`_ release on GitHub.
 
+X.Y.Z (2024-MM-DD)
+------------------
+
+Command line
+============
+
+The pack command now updates the charm libs in the project directory if they don't meet
+the requirements in the ``charm-libs`` key of ``charmcraft.yaml``.
+
+3.2.2 (2024-10-16)
+------------------
+
+- The ``whoami`` command now works with charm-scoped credentials.
+
+For a complete list of commits, see the `3.2.2`_ release on GitHub.
+
+3.2.1 (2024-09-16)
+------------------
+
+This is a bugfix release for 3.2, bringing in two fixes:
+
+Core
+====
+
+The shared cache directory now gets locked. Builds that run while another copy of
+Charmcraft has the cache directory locked will run without a shared cache.
+
+Plugins
+#######
+
+charm
+"""""
+
+The charm plugin will now force-install pip if the pip version installed in the venv
+is older than the minimum version, guaranteeing that pip gets updated correctly.
+
+For a complete list of commits, see the `3.2.1`_ release on GitHub.
+
+2.7.4 (2024-10-07)
+------------------
+
+This release bumps some dependencies to fix a security issue with requests.
+
+For a complete list of commits, see the `2.7.4`_ release on GitHub.
+
+2.7.3 (2024-09-16)
+------------------
+
+Core
+====
+
+The shared cache directory now gets locked. Builds that run while another copy of
+Charmcraft has the cache directory locked will run without a shared cache.
+
+The charm plugin now force-reinstalls pip when necessary, guaranteeing a correct
+version of pip.
+
+For a complete list of commits, see the `2.7.3`_ release on GitHub.
+
+2.7.2 (2024-09-09)
+------------------
+
+We've backported some 3.x bugfixes to the 2.7 series.
+
+Store
+=====
+
+Skopeo now uses an insecure policy when copying OCI images, allowing it to run
+even when the user hasn't set up OCI image policies.
+
+Meta
+====
+
+Build fixes to the published version.
+
+For a complete list of commits, see the `2.7.2`_ release on GitHub.
+
+
+3.2.0 (2024-08-28)
+------------------
+
+We have some fixes to the 3.1 series, as well as the features below.
+The most notable under-the-hood work is that Charmcraft now uses pydantic 2.
+
+Core
+====
+
+You can now set ``charm-user`` in ``charmcraft.yaml`` to set which user Juju 3.6.0+ will
+use for running a Kubernetes charm.
+
+Plugins
+#######
+
+reactive
+""""""""
+
+Fix: ``actions.yaml`` is no longer overwritten.
+
+Extensions
+##########
+
+go-framework
+""""""""""""
+
+New ``go-framework`` extension for easily charming Go applications.
+
+Documentation
+=============
+
+The changelog is now included in the Charmcraft documentation. For completeness, we've
+back-filled the log with all the important changes from previous releases documented
+on GitHub.
+
+For a complete list of commits, see the `3.2.0`_ release on GitHub.
 
 3.1.2 (2024-08-07)
 ------------------
 
@@ -252,7 +366,13 @@ page.
 .. _2.6.0: https://github.com/canonical/charmcraft/releases/tag/2.6.0
 .. _2.7.0: https://github.com/canonical/charmcraft/releases/tag/2.7.0
 .. _2.7.1: https://github.com/canonical/charmcraft/releases/tag/2.7.1
+.. _2.7.2: https://github.com/canonical/charmcraft/releases/tag/2.7.2
+.. _2.7.3: https://github.com/canonical/charmcraft/releases/tag/2.7.3
+.. _2.7.4: https://github.com/canonical/charmcraft/releases/tag/2.7.4
 .. _3.0.0: https://github.com/canonical/charmcraft/releases/tag/3.0.0
 .. _3.1.0: https://github.com/canonical/charmcraft/releases/tag/3.1.0
 .. _3.1.1: https://github.com/canonical/charmcraft/releases/tag/3.1.1
 .. _3.1.2: https://github.com/canonical/charmcraft/releases/tag/3.1.2
+.. 
_3.2.0: https://github.com/canonical/charmcraft/releases/tag/3.2.0 +.. _3.2.1: https://github.com/canonical/charmcraft/releases/tag/3.2.1 +.. _3.2.2: https://github.com/canonical/charmcraft/releases/tag/3.2.2 diff --git a/docs/reference/index.rst b/docs/reference/index.rst index 6b63de1bc..ccb31aa63 100644 --- a/docs/reference/index.rst +++ b/docs/reference/index.rst @@ -12,4 +12,6 @@ data can be found in the `Charm SDK docs `_. commands models/index + parts + plugins/index changelog diff --git a/docs/reference/parts.rst b/docs/reference/parts.rst new file mode 100644 index 000000000..4ea8ae85e --- /dev/null +++ b/docs/reference/parts.rst @@ -0,0 +1,14 @@ +.. _parts: + +Parts +***** + +Parts, powered by :external+craft-parts:ref:`craft-parts `, power the build +system that charmcraft uses. + +.. toctree:: + :maxdepth: 1 + + /common/craft-parts/reference/part_properties + /common/craft-parts/reference/parts_steps + /common/craft-parts/reference/step_execution_environment diff --git a/docs/reference/plugins/index.rst b/docs/reference/plugins/index.rst new file mode 100644 index 000000000..dce9a13a3 --- /dev/null +++ b/docs/reference/plugins/index.rst @@ -0,0 +1,27 @@ +.. _plugins: + +Parts plugins +************* + +Most charms only need one, maybe two parts, typically consisting of one of Charmcraft's +application-specific plugins such as the `charm plugin`_ or the `reactive plugin`_ and +potentially the addition of further files using the :ref:`craft_parts_dump_plugin`. + +.. toctree:: + :maxdepth: 1 + + /common/craft-parts/reference/plugins/dump_plugin + /common/craft-parts/reference/plugins/nil_plugin + python_plugin + poetry_plugin + +.. warning:: + Other plugins are available from :external+craft-parts:ref:`craft-parts `, + but these are unsupported in Charmcraft and should be used with caution. + + These plugins may significantly increase the size of a packed charm, and they may + not work as intended. Please file a `feature request`_ in Charmcraft if you have a + use case for another craft-parts upstream plugin. + +.. _charm plugin: https://juju.is/docs/sdk/charmcraft-yaml#heading--the-charm-plugin +.. _reactive plugin: https://juju.is/docs/sdk/charmcraft-yaml#heading--the-reactive-plugin diff --git a/docs/reference/plugins/poetry-charmcraft.yaml b/docs/reference/plugins/poetry-charmcraft.yaml new file mode 100644 index 000000000..86fb0b80b --- /dev/null +++ b/docs/reference/plugins/poetry-charmcraft.yaml @@ -0,0 +1,13 @@ +name: my-charm +type: charm +title: My poetry charm +summary: An operator charm using Poetry. +description: | + An operator charm that uses Poetry for its project. +base: ubuntu@24.04 +platforms: + amd64: +parts: + my-charm: + source: . + plugin: poetry diff --git a/docs/reference/plugins/poetry_plugin.rst b/docs/reference/plugins/poetry_plugin.rst new file mode 100644 index 000000000..2f6c3dba6 --- /dev/null +++ b/docs/reference/plugins/poetry_plugin.rst @@ -0,0 +1,51 @@ +.. _craft_parts_poetry_plugin: + +Poetry plugin +============= + +The Poetry plugin can be used for Python charms written using `Poetry`_ and the +`Operator framework`_. + +.. include:: /common/craft-parts/reference/plugins/poetry_plugin.rst + :start-after: .. _craft_parts_poetry_plugin-keywords: + :end-before: .. _craft_parts_poetry_plugin-environment_variables: + +python-keep-bins +~~~~~~~~~~~~~~~~ +**Type**: boolean +**Default**: False + +Whether to keep python scripts in the virtual environment's ``bin`` directory. + +.. 
include:: /common/craft-parts/reference/plugins/poetry_plugin.rst + :start-after: .. _craft_parts_poetry_plugin-environment_variables: + :end-before: .. _poetry-details-end: + +How it works +------------ + +During the build step, the plugin performs the following actions: + +1. It creates a virtual environment in the + :ref:`${CRAFT_PART_INSTALL}/venv ` directory. +2. It uses :command:`poetry export` to create a ``requirements.txt`` in the project's + build directory. +3. It uses :command:`pip` to install the packages referenced in ``requirements.txt`` + into the virtual environment. Undeclared dependencies are ignored. +4. It copies any existing ``src`` and ``lib`` directories from your charm project into + the final charm. +5. It runs :command:`pip check` to ensure the virtual environment is consistent. + +Example +------- + +The following ``charmcraft.yaml`` file can be used with a poetry project to build +the charm for Ubuntu 24.04: + +.. literalinclude:: poetry-charmcraft.yaml + :language: yaml + + +.. _Poetry: https://python-poetry.org +.. _dependency groups: https://python-poetry.org/docs/managing-dependencies#dependency-groups +.. _environment variables to configure Poetry: https://python-poetry.org/docs/configuration/#using-environment-variables diff --git a/docs/reference/plugins/python-charmcraft.yaml b/docs/reference/plugins/python-charmcraft.yaml new file mode 100644 index 000000000..3636faff3 --- /dev/null +++ b/docs/reference/plugins/python-charmcraft.yaml @@ -0,0 +1,15 @@ +name: my-charm +type: charm +title: My Python charm +summary: An operator charm that uses the python plugin +description: | + An operator charm using the Python plugin. +base: ubuntu@24.04 +platforms: + amd64: +parts: + my-charm: + source: . + plugin: python + python-requirements: + - requirements.txt diff --git a/docs/reference/plugins/python_plugin.rst b/docs/reference/plugins/python_plugin.rst new file mode 100644 index 000000000..3b9c8a256 --- /dev/null +++ b/docs/reference/plugins/python_plugin.rst @@ -0,0 +1,61 @@ +.. _craft_parts_python_plugin: + +Python plugin +============= + +The Python plugin builds charms written in Python. It's typically +used in conjunction with the `Operator framework`_. + +.. include:: /common/craft-parts/reference/plugins/python_plugin.rst + :start-after: .. _craft_parts_python_plugin-keywords: + :end-before: .. _craft_parts_python_plugin-environment_variables: + +python-keep-bins +~~~~~~~~~~~~~~~~ +**Type**: boolean +**Default**: False + +Whether to keep python scripts in the virtual environment's ``bin`` directory. + +.. include:: /common/craft-parts/reference/plugins/python_plugin.rst + :start-after: .. _craft_parts_python_plugin-environment_variables: + :end-before: .. _python-details-begin: + +Dependencies +------------ + +This plugin creates a Python virtual environment in the ``venv`` directory of your +charm using the version of Python included with your base and the requirements files +provided in the ``python-requirements`` key. + +.. note:: + The python plugin prevents :command:`pip` from installing dependencies for the + required packages. Therefore, requirements must include indirect dependencies as + well as direct dependencies. It is recommended that you use a tool such as + :command:`pip-compile` or :command:`uv` to manage the contents of your + ``requirements.txt`` file. + +How it works +------------ + +During the build step, the plugin performs the following actions: + +1. 
It creates a virtual environment in the + :ref:`${CRAFT_PART_INSTALL}/venv ` + directory. +2. It uses :command:`pip` to install the required Python packages specified + by the ``python-requirements``, ``python-constraints`` and ``python-packages`` + keys. +4. It copies any existing ``src`` and ``lib`` directories from your charm project into + the final charm. + +Example +------- + +The following ``charmcraft.yaml`` file can be used with a standard charm structure +to build a charm for Ubuntu 24.04: + +.. literalinclude:: python-charmcraft.yaml + :language: yaml + + diff --git a/docs/reuse/links.txt b/docs/reuse/links.txt new file mode 100644 index 000000000..c6cf96137 --- /dev/null +++ b/docs/reuse/links.txt @@ -0,0 +1,18 @@ +.. _Charmcraft: https://canonical-charmcraft.readthedocs-hosted.com +.. _Chisel: https://github.com/canonical/chisel +.. _`Chisel releases`: https://github.com/canonical/chisel-releases +.. _`Craft-parts`: https://canonical-craft-parts.readthedocs-hosted.com +.. _Docker: https://docs.docker.com/ +.. _`feature request`: https://github.com/canonical/charmcraft/issues/new?assignees=&labels=Enhancement&projects=&template=task.yaml +.. _`OCI archive format`: https://github.com/opencontainers/image-spec/blob/main/layer.md#distributable-format +.. _OCI_image_spec: https://github.com/opencontainers/image-spec/blob/main/spec.md +.. _`OCI layers`: https://github.com/opencontainers/image-spec/blob/main/layer.md +.. _`Operator framework`: https://juju.is/docs/sdk/ops +.. _LXD: https://canonical.com/lxd +.. _Multipass: https://multipass.run/docs +.. _`Open Container Initiative`: https://opencontainers.org/ +.. _Rockcraft: https://documentation.ubuntu.com/rockcraft/ +.. _skopeo: https://github.com/containers/skopeo +.. _Snapcraft: https://snapcraft.io/docs/snapcraft-overview + +.. Potentially use a glossary to create indirect references to explanations. diff --git a/osv-scanner.toml b/osv-scanner.toml new file mode 100644 index 000000000..17da2fac1 --- /dev/null +++ b/osv-scanner.toml @@ -0,0 +1,4 @@ +[[IgnoredVulns]] +id = "CVE-2024-35195" +ignoreUntil = "2025-01-01T00:00:00Z" +reason = "Needed for requests-unixsocket, which we're replacing with requests-unixsocket2" diff --git a/pyproject.toml b/pyproject.toml index 597f305bb..821beff30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,12 +4,12 @@ dynamic = ["version"] description = "The main tool to build, upload, and develop in general the Juju charms." readme = "README.md" dependencies = [ - "craft-application~=4.1", + "craft-application~=4.2", "craft-cli>=2.3.0", "craft-grammar>=2.0.0", - "craft-parts>=2.0.0", + "craft-parts>=2.1.0", "craft-providers>=2.0.0", - "craft-platforms~=0.1", + "craft-platforms~=0.3", "craft-providers>=2.0.0", "craft-store>=3.0.0", "distro>=1.3.0", @@ -22,13 +22,9 @@ dependencies = [ "pyyaml", "requests", "requests-toolbelt", - "requests-unixsocket", "snap-helpers", "tabulate", - # Needed until requests-unixsocket supports urllib3 v2 - # https://github.com/msabramo/requests-unixsocket/pull/69 - # When updating, remove the urllib3 constraint from renovate config. - "urllib3<2.0", + "pip>=24.2", ] classifiers = [ "Development Status :: 5 - Production/Stable", @@ -48,10 +44,8 @@ charmcraft = "charmcraft.application.main:main" [project.optional-dependencies] dev = [ # When updating these, also update the dev/lint/types groups in renovate. 
"coverage", - "flake8", "freezegun", "hypothesis", - "pydocstyle", "pyfakefs", "pylint", "pytest", @@ -62,13 +56,12 @@ dev = [ # When updating these, also update the dev/lint/types groups in renovat "responses", ] lint = [ - "black~=24.0", "codespell[tomli]", "yamllint", ] types = [ - "mypy[reports]~=1.5", - "pyright==1.1.366", + "mypy[reports]~=1.11", + "pyright==1.1.383", "types-python-dateutil", "types-PyYAML", "types-requests<2.31.0.20240312", # Frozen until we can get urllib3 v2 @@ -80,7 +73,7 @@ apt = [ "python-apt>=2.4.0;sys_platform=='linux'" ] docs = [ - "canonical-sphinx~=0.1", + "canonical-sphinx[full]~=0.2", "pyspelling", "autodoc-pydantic~=2.0", "sphinx-autobuild~=2024.2", @@ -118,10 +111,6 @@ git_describe_command = "git describe --long --match '[0-9]*.[0-9]*.[0-9]*' --exc include = ["*craft*"] namespaces = false -[tool.black] -target-version = ["py310", "py311"] -line-length = 99 - [tool.codespell] ignore-words-list = "buildd,crate,keyserver,comandos,ro,dedent,dedented,tread,socio-economic" skip = "requirements*.txt,.tox,.git,build,.*_cache,__pycache__,*.tar,*.snap,*.png,./node_modules,./docs/_build,.direnv,.venv,venv,.vscode,charmcraft.spec" @@ -330,6 +319,17 @@ lint.ignore = [ # Allow Pydantic's `@validator` decorator to trigger class method treatment. classmethod-decorators = ["pydantic.validator"] +[tool.ruff.lint.pydocstyle] +ignore-decorators = [ # Functions with these decorators don't have to have docstrings. + "typing.overload", # Default configuration + # The next four are all variations on override, so child classes don't have to + # repeat parent classes' docstrings. + "overrides.override", + "overrides.overrides", + "typing.override", + "typing_extensions.override", +] + [tool.ruff.lint.per-file-ignores] "tests/**.py" = [ # Some things we want for the moin project are unnecessary in tests. 
"D", # Ignore docstring rules in tests diff --git a/requirements-dev.txt b/requirements-dev.txt index 06e8040a8..80597a400 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,92 +1,92 @@ annotated-types==0.7.0 -astroid==3.2.4 +astroid==3.3.5 attrs==24.2.0 +backports-tarfile==1.2.0 boolean-py==4.0 -certifi==2024.7.4 -cffi==1.17.0 -charset-normalizer==3.3.2 -coverage==7.6.1 -craft-application==4.1.0 -craft-archives==2.0.0 -craft-cli==2.6.0 -craft-grammar==2.0.0 -craft-parts==2.0.0 -craft-platforms==0.1.1 -craft-providers==2.0.0 -craft-store==3.0.0 -cryptography==43.0.0 -dill==0.3.8 +certifi==2024.8.30 +cffi==1.17.1 +chardet==5.2.0 +charset-normalizer==3.4.0 +coverage==7.6.7 +craft-application==4.4.0 +craft-archives==2.0.1 +craft-cli==2.10.1 +craft-grammar==2.0.1 +craft-parts==2.1.3 +craft-platforms==0.4.0 +craft-providers==2.0.4 +craft-store==3.0.2 +cryptography==43.0.3 +dill==0.3.9 distro==1.9.0 docker==7.1.0 -flake8==7.1.1 +exceptiongroup==1.2.2 freezegun==1.5.1 httplib2==0.22.0 -humanize==4.10.0 -hypothesis==6.111.0 -idna==3.7 -importlib-metadata==8.2.0 +humanize==4.11.0 +hypothesis==6.119.4 +idna==3.10 +importlib-metadata==8.5.0 iniconfig==2.0.0 isort==5.13.2 jaraco-classes==3.4.0 +jaraco-context==6.0.1 +jaraco-functools==4.1.0 jeepney==0.8.0 jinja2==3.1.4 jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -keyring==24.3.1 +jsonschema-specifications==2024.10.1 +keyring==25.5.0 launchpadlib==2.0.0 lazr-restfulclient==0.14.6 lazr-uri==1.0.6 -license-expression==30.3.1 +license-expression==30.4.0 macaroonbakery==1.3.4 -markupsafe==2.1.5 +markupsafe==3.0.2 mccabe==0.7.0 -more-itertools==10.4.0 +more-itertools==10.5.0 oauthlib==3.2.2 overrides==7.7.0 -packaging==24.1 -platformdirs==4.2.2 +packaging==24.2 +pip==24.3.1 +platformdirs==4.3.6 pluggy==1.5.0 -protobuf==5.27.3 -pycodestyle==2.12.1 +protobuf==5.28.3 pycparser==2.22 -pydantic==2.8.2 -pydantic-core==2.20.1 -pydantic-yaml==1.3.0 -pydocstyle==6.3.0 -pyfakefs==5.6.0 -pyflakes==3.2.0 +pydantic==2.9.2 +pydantic-core==2.23.4 +pyfakefs==5.7.1 pygit2==1.14.1 -pylint==3.2.6 +pylint==3.3.1 pymacaroons==0.13.0 pynacl==1.5.0 -pyparsing==3.1.2 +pyparsing==3.2.0 pyrfc3339==1.1 -pytest==8.3.2 -pytest-check==2.3.1 -pytest-cov==5.0.0 +pytest==8.3.3 +pytest-check==2.4.1 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-subprocess==1.5.2 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-debian==0.1.49 +pytz==2024.2 pyxdg==0.28 pyyaml==6.0.2 referencing==0.35.1 -requests==2.31.0 +requests==2.32.3 requests-toolbelt==1.0.0 -requests-unixsocket==0.3.0 +requests-unixsocket2==0.4.2 responses==0.25.3 -rpds-py==0.20.0 -ruamel-yaml==0.18.6 -ruamel-yaml-clib==0.2.8 +rpds-py==0.21.0 secretstorage==3.3.3 -setuptools==72.2.0 +setuptools==75.6.0 six==1.16.0 snap-helpers==0.4.2 -snowballstemmer==2.2.0 sortedcontainers==2.4.0 tabulate==0.9.0 +tomli==2.1.0 tomlkit==0.13.2 typing-extensions==4.12.2 -urllib3==1.26.19 -wadllib==1.3.6 -zipp==3.20.0 +urllib3==2.2.3 +wadllib==2.0.0 +zipp==3.21.0 diff --git a/requirements.txt b/requirements.txt index 70d1f985f..dc5b6a33d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,68 +1,71 @@ annotated-types==0.7.0 attrs==24.2.0 +backports-tarfile==1.2.0 boolean-py==4.0 -certifi==2024.7.4 -cffi==1.17.0 -charset-normalizer==3.3.2 -craft-application==4.1.0 -craft-archives==2.0.0 -craft-cli==2.6.0 -craft-grammar==2.0.0 -craft-parts==2.0.0 -craft-platforms==0.1.1 -craft-providers==2.0.0 -craft-store==3.0.0 -cryptography==43.0.0 +certifi==2024.8.30 +cffi==1.17.1 +chardet==5.2.0 +charset-normalizer==3.4.0 
+craft-application==4.4.0 +craft-archives==2.0.1 +craft-cli==2.10.1 +craft-grammar==2.0.1 +craft-parts==2.1.3 +craft-platforms==0.4.0 +craft-providers==2.0.4 +craft-store==3.0.2 +cryptography==43.0.3 distro==1.9.0 docker==7.1.0 httplib2==0.22.0 -humanize==4.10.0 -idna==3.7 -importlib-metadata==8.2.0 +humanize==4.11.0 +idna==3.10 +importlib-metadata==8.5.0 jaraco-classes==3.4.0 +jaraco-context==6.0.1 +jaraco-functools==4.1.0 jeepney==0.8.0 jinja2==3.1.4 jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -keyring==24.3.1 +jsonschema-specifications==2024.10.1 +keyring==25.5.0 launchpadlib==2.0.0 lazr-restfulclient==0.14.6 lazr-uri==1.0.6 -license-expression==30.3.1 +license-expression==30.4.0 macaroonbakery==1.3.4 -markupsafe==2.1.5 -more-itertools==10.4.0 +markupsafe==3.0.2 +more-itertools==10.5.0 oauthlib==3.2.2 overrides==7.7.0 -packaging==24.1 -platformdirs==4.2.2 -protobuf==5.27.3 +packaging==24.2 +pip==24.3.1 +platformdirs==4.3.6 +protobuf==5.28.3 pycparser==2.22 -pydantic==2.8.2 -pydantic-core==2.20.1 -pydantic-yaml==1.3.0 +pydantic==2.9.2 +pydantic-core==2.23.4 pygit2==1.14.1 pymacaroons==0.13.0 pynacl==1.5.0 -pyparsing==3.1.2 +pyparsing==3.2.0 pyrfc3339==1.1 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-debian==0.1.49 +pytz==2024.2 pyxdg==0.28 pyyaml==6.0.2 referencing==0.35.1 -requests==2.31.0 +requests==2.32.3 requests-toolbelt==1.0.0 -requests-unixsocket==0.3.0 -rpds-py==0.20.0 -ruamel-yaml==0.18.6 -ruamel-yaml-clib==0.2.8 +requests-unixsocket2==0.4.2 +rpds-py==0.21.0 secretstorage==3.3.3 -setuptools==72.2.0 +setuptools==75.6.0 six==1.16.0 snap-helpers==0.4.2 tabulate==0.9.0 typing-extensions==4.12.2 -urllib3==1.26.19 -wadllib==1.3.6 -zipp==3.20.0 +urllib3==2.2.3 +wadllib==2.0.0 +zipp==3.21.0 diff --git a/snap/hooks/configure b/snap/hooks/configure index 3594428e0..9683bbf55 100755 --- a/snap/hooks/configure +++ b/snap/hooks/configure @@ -73,6 +73,10 @@ def _delete_lxd_instance(instance: dict) -> None: print(f"Failed to remove LXD instance {instance['name']}.", file=sys.stderr) +def _has_lxd() -> bool: + return subprocess.run(["snap", "list", "lxd"]).returncode == 0 + + def configure_hook_main(): # Unique valid base instances directory to prevent duplication. 
image_slots = {} @@ -85,6 +89,10 @@ def configure_hook_main(): print(f"Unsupported snap configuration: {reason}.", file=sys.stderr) sys.exit(1) + if not _has_lxd(): + print("LXD is not installed.", file=sys.stderr) + return + # Remove only base images in LXD related project try: lxd_images_json = subprocess.check_output( diff --git a/snap/hooks/remove b/snap/hooks/remove index 907896737..77332f2ec 100755 --- a/snap/hooks/remove +++ b/snap/hooks/remove @@ -9,7 +9,15 @@ import sys PROJECT_NAME = "charmcraft" +def _has_lxd() -> bool: + return subprocess.run(["snap", "list", "lxd"]).returncode == 0 + + def remove_hook_main(): + if not _has_lxd(): + print("LXD is not installed.", file=sys.stderr) + return + # Remove all images in LXD related project try: lxd_images_json = subprocess.check_output( diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 6edca969a..45044c69e 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -187,7 +187,7 @@ parts: build-packages: - golang-go build-environment: - - CGO_ENABLED: 0 + - CGO_ENABLED: "0" stage: - -bin/humbox diff --git a/spread.yaml b/spread.yaml index 55ea1927b..4083ee765 100644 --- a/spread.yaml +++ b/spread.yaml @@ -202,12 +202,9 @@ suites: # https://github.com/canonical/lxd-cloud/blob/f20a64a8af42485440dcbfd370faf14137d2f349/test/includes/lxd.sh#L13-L23 iptables -P FORWARD ACCEPT - # Ensure that the reused charms and bundles are registered if necessary. + # Ensure that the reused charms are registered if necessary. if ! charmcraft status "${CHARM_DEFAULT_NAME}"; then - charmcraft register $CHARM_DEFAULT_NAME || ERROR Charm $CHARM_DEFAULT_NAME cannot be registered to this account. - fi - if ! charmcraft status $BUNDLE_DEFAULT_NAME; then - charmcraft register-bundle $BUNDLE_DEFAULT_NAME || ERROR Charm $BUNDLE_DEFAULT_NAME cannot be registered to this account. + charmcraft register $CHARM_DEFAULT_NAME fi rm -f charmcraft.yaml @@ -226,6 +223,7 @@ suites: # should be part of the environment (when running spread locally just define it, # for GH actions set it in Settings -> Security -> Actions -> Repository secrets) CHARMCRAFT_AUTH: "$(HOST: echo $CHARMCRAFT_AUTH)" + CHARMCRAFT_SINGLE_CHARM_AUTH: "$(HOST: echo $CHARMCRAFT_SINGLE_CHARM_AUTH)" # to not flood Charmhub with names the same two are always used in the Store related # tests (except in the names registration tests, of course); register them manually diff --git a/tests/commands/test_store_api.py b/tests/commands/test_store_api.py index 11a22d9a4..4c9cc722f 100644 --- a/tests/commands/test_store_api.py +++ b/tests/commands/test_store_api.py @@ -52,7 +52,10 @@ def client_mock(monkeypatch): """Fixture to provide a mocked client.""" monkeypatch.setattr(platform, "node", lambda: "fake-host") client_mock = MagicMock(spec=Client) - with patch("charmcraft.store.store.Client", lambda api, storage, ephemeral=True: client_mock): + with patch( + "charmcraft.store.store.Client", + lambda api, storage, ephemeral=True: client_mock, + ): yield client_mock @@ -287,14 +290,16 @@ def test_auth_bad_credentials(charmhub_config, monkeypatch): Store(charmhub_config) assert ( - str(error.value) == "Credentials could not be parsed. Expected base64 encoded credentials." + str(error.value) + == "Credentials could not be parsed. Expected base64 encoded credentials." 
) def test_no_keyring(charmhub_config): """Verify CraftStore is raised from Store when no keyring is available.""" with patch( - "craft_store.StoreClient.__init__", side_effect=craft_store.errors.NoKeyringError() + "craft_store.StoreClient.__init__", + side_effect=craft_store.errors.NoKeyringError(), ): with pytest.raises(CraftError) as error: Store(charmhub_config) @@ -497,7 +502,9 @@ def test_register_name(client_mock, charmhub_config): result = store.register_name("testname", "stuff") assert client_mock.mock_calls == [ - call.request_urlpath_json("POST", "/v1/charm", json={"name": "testname", "type": "stuff"}), + call.request_urlpath_json( + "POST", "/v1/charm", json={"name": "testname", "type": "stuff"} + ), ] assert result is None @@ -512,7 +519,9 @@ def test_register_name_unauthorized_logs_in(client_mock, charmhub_config): store.register_name("testname", "stuff") assert client_mock.mock_calls == [ - call.request_urlpath_json("POST", "/v1/charm", json={"name": "testname", "type": "stuff"}), + call.request_urlpath_json( + "POST", "/v1/charm", json={"name": "testname", "type": "stuff"} + ), call.logout(), call.login( ttl=108000, @@ -524,7 +533,9 @@ def test_register_name_unauthorized_logs_in(client_mock, charmhub_config): "package-view", ], ), - call.request_urlpath_json("POST", "/v1/charm", json={"name": "testname", "type": "stuff"}), + call.request_urlpath_json( + "POST", "/v1/charm", json={"name": "testname", "type": "stuff"} + ), ] @@ -546,9 +557,13 @@ def test_unregister_name_success(client_mock, charmhub_config): id="unknown_name", ), pytest.param( - FakeResponse("discharge required", 401), StoreServerError, id="discharge_required" + FakeResponse("discharge required", 401), + StoreServerError, + id="discharge_required", + ), + pytest.param( + FakeResponse("Unauthorized", 401), StoreServerError, id="Unauthorized" ), - pytest.param(FakeResponse("Unauthorized", 401), StoreServerError, id="Unauthorized"), pytest.param( FakeResponse("Cannot unregister a package with existing revisions", 403), CraftError, @@ -588,7 +603,9 @@ def test_unregister_name_errors( ), ], ) -def test_unregister_name_login(client_mock, charmhub_config, http_response: FakeResponse): +def test_unregister_name_login( + client_mock, charmhub_config, http_response: FakeResponse +): """Retry login when registering a name.""" client_mock.unregister_name.side_effect = [StoreServerError(http_response), None] @@ -713,7 +730,9 @@ def test_upload_straightforward(client_mock, emitter, charmhub_config): test_revision = 123 test_status_ok = "test-status" status_response = { - "revisions": [{"status": test_status_ok, "revision": test_revision, "errors": None}] + "revisions": [ + {"status": test_status_ok, "revision": test_revision, "errors": None} + ] } client_mock.request_urlpath_json.side_effect = [ @@ -732,7 +751,9 @@ def test_upload_straightforward(client_mock, emitter, charmhub_config): assert client_mock.mock_calls == [ call.whoami(), call.push_file(test_filepath), - call.request_urlpath_json("POST", test_endpoint, json={"upload-id": test_upload_id}), + call.request_urlpath_json( + "POST", test_endpoint, json={"upload-id": test_upload_id} + ), call.request_urlpath_json("GET", test_status_url), ] @@ -772,7 +793,9 @@ def test_upload_polls_status_ok(client_mock, emitter, charmhub_config): "revisions": [{"status": "more-revisions", "revision": None, "errors": None}] } status_response_3 = { - "revisions": [{"status": test_status_ok, "revision": test_revision, "errors": None}] + "revisions": [ + {"status": test_status_ok, 
"revision": test_revision, "errors": None} + ] } client_mock.request_urlpath_json.side_effect = [ {"status-url": test_status_url}, @@ -917,7 +940,9 @@ def test_upload_resources_endpoint(charmhub_config): with patch.object(store, "_upload") as mock: mock.return_value = test_results - result = store.upload_resource("test-charm", "test-resource", "test-type", "test-filepath") + result = store.upload_resource( + "test-charm", "test-resource", "test-type", "test-filepath" + ) expected_endpoint = "/v1/charm/test-charm/resources/test-resource/revisions" mock.assert_called_once_with( expected_endpoint, @@ -942,7 +967,9 @@ def test_upload_including_extra_parameters(client_mock, emitter, charmhub_config test_revision = 123 test_status_ok = "test-status" status_response = { - "revisions": [{"status": test_status_ok, "revision": test_revision, "errors": None}] + "revisions": [ + {"status": test_status_ok, "revision": test_revision, "errors": None} + ] } client_mock.request_urlpath_json.side_effect = [ @@ -985,7 +1012,9 @@ def test_list_revisions_ok(client_mock, charmhub_config): "created-at": "2020-06-29T22:11:00.123", "status": "approved", "errors": None, - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], } ] } @@ -1032,7 +1061,9 @@ def test_list_revisions_errors(client_mock, charmhub_config): {"message": "error text 1", "code": "error-code-1"}, {"message": "error text 2", "code": "error-code-2"}, ], - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], } ] } @@ -1063,7 +1094,9 @@ def test_list_revisions_several_mixed(client_mock, charmhub_config): "errors": [ {"message": "error", "code": "code"}, ], - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], }, { "revision": 2, @@ -1071,7 +1104,9 @@ def test_list_revisions_several_mixed(client_mock, charmhub_config): "created-at": "2020-06-29T22:11:02", "status": "approved", "errors": None, - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], }, ] } @@ -1126,7 +1161,9 @@ def test_release_simple(client_mock, charmhub_config): expected_body = [{"revision": 123, "channel": "somechannel", "resources": []}] assert client_mock.mock_calls == [ - call.request_urlpath_json("POST", "/v1/charm/testname/releases", json=expected_body), + call.request_urlpath_json( + "POST", "/v1/charm/testname/releases", json=expected_body + ), ] @@ -1141,7 +1178,9 @@ def test_release_multiple_channels(client_mock, charmhub_config): {"revision": 123, "channel": "channel3", "resources": []}, ] assert client_mock.mock_calls == [ - call.request_urlpath_json("POST", "/v1/charm/testname/releases", json=expected_body), + call.request_urlpath_json( + "POST", "/v1/charm/testname/releases", json=expected_body + ), ] @@ -1171,7 +1210,9 @@ def test_release_with_resources(client_mock, charmhub_config): }, ] assert client_mock.mock_calls == [ - call.request_urlpath_json("POST", "/v1/charm/testname/releases", json=expected_body), + call.request_urlpath_json( + "POST", "/v1/charm/testname/releases", json=expected_body + ), ] @@ -1226,7 +1267,9 @@ def test_status_ok(client_mock, charmhub_config): "created-at": "2020-06-29T22:11:05", "status": "approved", "errors": None, - 
"bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], }, { "revision": 10, @@ -1234,7 +1277,9 @@ def test_status_ok(client_mock, charmhub_config): "created-at": "2020-06-29T22:11:10", "status": "approved", "errors": None, - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], }, ], } @@ -1360,7 +1405,9 @@ def test_status_with_resources(client_mock, charmhub_config): "created-at": "2020-06-29T22:11:05", "status": "approved", "errors": None, - "bases": [{"architecture": "amd64", "channel": "20.04", "name": "ubuntu"}], + "bases": [ + {"architecture": "amd64", "channel": "20.04", "name": "ubuntu"} + ], }, ], } @@ -1814,7 +1861,8 @@ def test_get_oci_registry_credentials(client_mock, charmhub_config): assert client_mock.mock_calls == [ call.request_urlpath_json( - "GET", "/v1/charm/charm-name/resources/resource-name/oci-image/upload-credentials" + "GET", + "/v1/charm/charm-name/resources/resource-name/oci-image/upload-credentials", ) ] assert result.image_name == "test-image-name" @@ -1826,7 +1874,9 @@ def test_get_oci_image_blob(client_mock, charmhub_config): """Get the blob generated by Charmhub to refer to the OCI image.""" store = Store(charmhub_config) client_mock.request_urlpath_text.return_value = "some opaque stuff" - result = store.get_oci_image_blob("charm-name", "resource-name", "a-very-specific-digest") + result = store.get_oci_image_blob( + "charm-name", "resource-name", "a-very-specific-digest" + ) assert client_mock.mock_calls == [ call.request_urlpath_text( diff --git a/tests/commands/test_store_client.py b/tests/commands/test_store_client.py index 392e23c4b..4722254e0 100644 --- a/tests/commands/test_store_client.py +++ b/tests/commands/test_store_client.py @@ -41,7 +41,9 @@ def test_useragent_linux(monkeypatch): """Construct a user-agent as a patched Linux machine""" monkeypatch.setenv("TRAVIS_TESTING", "1") - os_platform = OSPlatform(system="Arch Linux", release="5.10.10-arch1-1", machine="x86_64") + os_platform = OSPlatform( + system="Arch Linux", release="5.10.10-arch1-1", machine="x86_64" + ) with ( patch("charmcraft.store.client.__version__", "1.2.3"), patch("charmcraft.utils.get_os_platform", return_value=os_platform), @@ -50,7 +52,10 @@ def test_useragent_linux(monkeypatch): patch("platform.python_version", return_value="3.9.1"), ): ua = build_user_agent() - assert ua == "charmcraft/1.2.3 (testing) Arch Linux/5.10.10-arch1-1 (x86_64) python/3.9.1" + assert ( + ua + == "charmcraft/1.2.3 (testing) Arch Linux/5.10.10-arch1-1 (x86_64) python/3.9.1" + ) def test_useragent_windows(monkeypatch): @@ -182,7 +187,9 @@ def test_client_request_text_error(client_class): """Hits the server in text mode, getting an error.""" client = client_class("http://api.test", "http://storage.test") original_error_text = "bad bad server" - client.request_mock.side_effect = craft_store.errors.CraftStoreError(original_error_text) + client.request_mock.side_effect = craft_store.errors.CraftStoreError( + original_error_text + ) with pytest.raises(craft_store.errors.CraftStoreError) as cm: client.request_urlpath_text("GET", "/somepath") @@ -193,7 +200,9 @@ def test_client_request_json_error(client_class): """Hits the server in json mode, getting an error.""" client = client_class("http://api.test", "http://storage.test") original_error_text = "bad bad server" - client.request_mock.side_effect 
= craft_store.errors.CraftStoreError(original_error_text) + client.request_mock.side_effect = craft_store.errors.CraftStoreError( + original_error_text + ) with pytest.raises(craft_store.errors.CraftStoreError) as cm: client.request_urlpath_json("GET", "/somepath") @@ -209,7 +218,9 @@ def test_client_hit_success_withbody(client_class): result = client.request_urlpath_text("GET", "/somepath", "somebody") - assert client.request_mock.mock_calls == [call("GET", "http://api.test/somepath", "somebody")] + assert client.request_mock.mock_calls == [ + call("GET", "http://api.test/somepath", "somebody") + ] assert result == response_value @@ -303,16 +314,16 @@ def test_client_push_response_unsuccessful(tmp_path, client_class): with patch.object(client, "_storage_push", return_value=fake_response): with pytest.raises(CraftError) as error: client.push_file(test_filepath) - expected_error = ( - "Server error while pushing file: {'successful': False, 'upload_id': None}" - ) + expected_error = "Server error while pushing file: {'successful': False, 'upload_id': None}" assert str(error.value) == expected_error def test_storage_push_succesful(client_class): """Bytes are properly pushed to the Storage.""" test_monitor = MultipartEncoderMonitor( - MultipartEncoder(fields={"binary": ("filename", "somefile", "application/octet-stream")}) + MultipartEncoder( + fields={"binary": ("filename", "somefile", "application/octet-stream")} + ) ) client = client_class("http://api.test", "http://test.url:0000") @@ -335,9 +346,7 @@ def test_alternate_auth_login_forbidden(client_class, monkeypatch): client = client_class("http://api.test", "http://storage.test") with pytest.raises(CraftError) as cm: client.login() - expected_error = ( - "Cannot login when using alternative auth through CHARMCRAFT_AUTH environment variable." - ) + expected_error = "Cannot login when using alternative auth through CHARMCRAFT_AUTH environment variable." assert str(cm.value) == expected_error @@ -347,9 +356,7 @@ def test_alternate_auth_logout_forbidden(client_class, monkeypatch): client = client_class("http://api.test", "http://storage.test") with pytest.raises(CraftError) as cm: client.logout() - expected_error = ( - "Cannot logout when using alternative auth through CHARMCRAFT_AUTH environment variable." - ) + expected_error = "Cannot logout when using alternative auth through CHARMCRAFT_AUTH environment variable." 
assert str(cm.value) == expected_error @@ -373,12 +380,16 @@ def test_anonymous_client_request_success_simple(): """Hits the server, all ok.""" response_value = {"foo": "bar"} fake_response = FakeResponse(content=json.dumps(response_value), status_code=200) - with patch("craft_store.http_client.HTTPClient.request") as mock_http_client_request: + with patch( + "craft_store.http_client.HTTPClient.request" + ) as mock_http_client_request: mock_http_client_request.return_value = fake_response client = AnonymousClient("http://api.test", "http://storage.test") result = client.request_urlpath_json("GET", "/somepath") - assert mock_http_client_request.mock_calls == [call("GET", "http://api.test/somepath")] + assert mock_http_client_request.mock_calls == [ + call("GET", "http://api.test/somepath") + ] assert result == response_value @@ -386,18 +397,24 @@ def test_anonymous_client_request_success_without_json_parsing(): """Hits the server, all ok, return the raw response without parsing the json.""" response_value = "whatever test response" fake_response = FakeResponse(content=response_value, status_code=200) - with patch("craft_store.http_client.HTTPClient.request") as mock_http_client_request: + with patch( + "craft_store.http_client.HTTPClient.request" + ) as mock_http_client_request: client = AnonymousClient("http://api.test", "http://storage.test") mock_http_client_request.return_value = fake_response result = client.request_urlpath_text("GET", "/somepath") - assert mock_http_client_request.mock_calls == [call("GET", "http://api.test/somepath")] + assert mock_http_client_request.mock_calls == [ + call("GET", "http://api.test/somepath") + ] assert result == response_value def test_anonymous_client_request_text_error(): """Hits the server in text mode, getting an error.""" - with patch("craft_store.http_client.HTTPClient.request") as mock_http_client_request: + with patch( + "craft_store.http_client.HTTPClient.request" + ) as mock_http_client_request: original_error_text = "bad bad server" mock_http_client_request.side_effect = craft_store.errors.CraftStoreError( original_error_text @@ -412,7 +429,9 @@ def test_anonymous_client_request_text_error(): def test_anonymous_client_request_json_error(): """Hits the server in json mode, getting an error.""" - with patch("craft_store.http_client.HTTPClient.request") as mock_http_client_request: + with patch( + "craft_store.http_client.HTTPClient.request" + ) as mock_http_client_request: original_error_text = "bad bad server" mock_http_client_request.side_effect = craft_store.errors.CraftStoreError( original_error_text @@ -429,7 +448,9 @@ def test_anonymous_client_hit_success_withbody(): """Hits the server including a body, all ok.""" response_value = {"foo": "bar"} fake_response = FakeResponse(content=response_value, status_code=200) - with patch("craft_store.http_client.HTTPClient.request") as mock_http_client_request: + with patch( + "craft_store.http_client.HTTPClient.request" + ) as mock_http_client_request: mock_http_client_request.return_value = fake_response client = AnonymousClient("http://api.test", "http://storage.test") diff --git a/tests/commands/test_store_registry.py b/tests/commands/test_store_registry.py deleted file mode 100644 index 22aef17ec..000000000 --- a/tests/commands/test_store_registry.py +++ /dev/null @@ -1,1297 +0,0 @@ -# Copyright 2021-2022 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# For further info, check https://github.com/canonical/charmcraft - -"""Tests for the OCI Registry related functionality (code in store/registry.py).""" - -import base64 -import gzip -import hashlib -import io -import json -import pathlib -import sys -import tarfile -from unittest.mock import call, patch - -import pytest -import requests -from craft_cli import CraftError - -from charmcraft import const -from charmcraft.store import registry -from charmcraft.store.registry import ( - CONFIG_MIMETYPE, - LAYER_MIMETYPE, - MANIFEST_V2_MIMETYPE, - OCTET_STREAM_MIMETYPE, - ImageHandler, - LocalDockerdInterface, - OCIRegistry, - assert_response_ok, -) - -# -- tests for response verifications - - -def create_response( - status_code=200, headers=None, raw_content=b"", json_content=None, content_type=None -): - """Create a fake requests' response.""" - if headers is None: - headers = {} - - if json_content is not None: - headers.setdefault("Content-Type", content_type or "application/json") - content_bytes = json.dumps(json_content).encode("utf8") - else: - content_bytes = raw_content - - resp = requests.Response() - resp.status_code = status_code - resp.raw = io.BytesIO(content_bytes) - resp.headers = headers # not case insensitive, but good enough - return resp - - -def test_assert_response_ok_simple_json(): - """Simple case for a good response with JSON content.""" - test_content = {"foo": 2, "bar": 1} - response = create_response(json_content=test_content) - result = assert_response_ok(response) - assert result == test_content - - -def test_assert_response_ok_not_json(): - """A good non-json response.""" - response = create_response(raw_content=b"stuff") - result = assert_response_ok(response) - assert result is None - - -def test_assert_response_ok_different_status(): - """A good response with a different status code.""" - test_content = {"foo": 2, "bar": 1} - response = create_response(json_content=test_content, status_code=201) - result = assert_response_ok(response, expected_status=201) - assert result == test_content - - -def test_assert_response_errors_in_result(): - """Response is as expected but server flags errors.""" - errors = [{"foo": "bar"}] - test_content = {"errors": errors} - response = create_response(json_content=test_content) - with pytest.raises(CraftError) as cm: - assert_response_ok(response) - assert str(cm.value) == f"Response with errors from server: {errors}" - - -def test_assert_response_bad_status_code_with_json_errors(): - """Different status code than expected, with the server including errors.""" - errors = [{"foo": "bar"}] - test_content = {"errors": errors} - response = create_response(status_code=404, json_content=test_content) - with pytest.raises(CraftError) as cm: - assert_response_ok(response) - error = cm.value - assert str(error) == "Wrong status code from server (expected=200, got=404)" - assert error.details == f"errors={errors} headers={{'Content-Type': 'application/json'}}" - - -def test_assert_response_bad_status_code_with_extra_json_errors(): - """The server still including errors, weird content type.""" - errors = [{"foo": "bar"}] - 
test_content = {"errors": errors} - response = create_response( - status_code=404, - json_content=test_content, - content_type="application/json;stuff", - ) - with pytest.raises(CraftError) as cm: - assert_response_ok(response) - error = cm.value - assert str(error) == "Wrong status code from server (expected=200, got=404)" - assert error.details == f"errors={errors} headers={{'Content-Type': 'application/json;stuff'}}" - - -def test_assert_response_bad_status_code_blind(): - """Different status code than expected, no more info.""" - response = create_response(status_code=500, raw_content=b"stuff") - with pytest.raises(CraftError) as cm: - assert_response_ok(response) - error = cm.value - assert str(error) == "Wrong status code from server (expected=200, got=500)" - assert error.details == "errors=None headers={}" - - -# -- tests for OCIRegistry auth & hit helpers - - -def test_auth_simple(responses): - """Simple authentication.""" - responses.add( - responses.GET, - "https://auth.fakereg.com?service=test-service&scope=test-scope", - json={"token": "test-token"}, - ) - - ocireg = OCIRegistry("https://fakereg.com", "test-image") - auth_info = { - "realm": "https://auth.fakereg.com", - "service": "test-service", - "scope": "test-scope", - } - token = ocireg._authenticate(auth_info) - assert token == "test-token" - sent_auth_header = responses.calls[0].request.headers.get("Authorization") - assert sent_auth_header is None - - -def test_auth_with_credentials(emitter, responses): - """Authenticate passing credentials.""" - responses.add( - responses.GET, - "https://auth.fakereg.com?service=test-service&scope=test-scope", - json={"token": "test-token"}, - ) - - ocireg = OCIRegistry( - "https://fakereg.com", - "test-image", - username="test-user", - password="test-password", - ) - auth_info = { - "realm": "https://auth.fakereg.com", - "service": "test-service", - "scope": "test-scope", - } - token = ocireg._authenticate(auth_info) - assert token == "test-token" - sent_auth_header = responses.calls[0].request.headers.get("Authorization") - expected_encoded = base64.b64encode(b"test-user:test-password") - assert sent_auth_header == "Basic " + expected_encoded.decode("ascii") - - # generic auth indication is logged but NOT the credentials - expected = f"Authenticating! 
{auth_info}" - emitter.assert_trace(expected) - - -def test_auth_with_just_username(responses): - """Authenticate passing credentials.""" - responses.add( - responses.GET, - "https://auth.fakereg.com?service=test-service&scope=test-scope", - json={"token": "test-token"}, - ) - - ocireg = OCIRegistry("https://fakereg.com", "test-image", username="test-user") - auth_info = { - "realm": "https://auth.fakereg.com", - "service": "test-service", - "scope": "test-scope", - } - token = ocireg._authenticate(auth_info) - assert token == "test-token" - sent_auth_header = responses.calls[0].request.headers.get("Authorization") - expected_encoded = base64.b64encode(b"test-user:") - assert sent_auth_header == "Basic " + expected_encoded.decode("ascii") - - -def test_hit_simple_initial_auth_ok(emitter, responses): - """Simple GET with auth working at once.""" - # set the Registry with an initial token - ocireg = OCIRegistry("https://fakereg.com", "test-image") - ocireg.auth_token = "some auth token" - - # fake a 200 response - responses.add(responses.GET, "https://fakereg.com/api/stuff") - - # try it - response = ocireg._hit("GET", "https://fakereg.com/api/stuff") - assert response == responses.calls[0].response - - # verify it authed ok - sent_auth_header = responses.calls[0].request.headers.get("Authorization") - assert sent_auth_header == "Bearer some auth token" - - # logged what it did - expected = "Hitting the registry: GET https://fakereg.com/api/stuff" - emitter.assert_trace(expected) - - -def test_hit_simple_re_auth_ok(responses): - """Simple GET but needing to re-authenticate.""" - # set the Registry - ocireg = OCIRegistry("https://fakereg.com", "test-image") - ocireg.auth_token = "some auth token" - - # need to set up two responses! - # - the 401 response with the proper info to re-auth - # - the request that actually works - headers = { - "Www-Authenticate": ( - 'Bearer realm="https://auth.fakereg.com/token",' - 'service="https://fakereg.com",scope="repository:library/stuff:pull"' - ) - } - responses.add(responses.GET, "https://fakereg.com/api/stuff", headers=headers, status=401) - responses.add(responses.GET, "https://fakereg.com/api/stuff") - - # try it, isolating the re-authentication (tested separately above) - with patch.object(ocireg, "_authenticate") as mock_auth: - mock_auth.return_value = "new auth token" - response = ocireg._hit("GET", "https://fakereg.com/api/stuff") - assert response == responses.calls[1].response - mock_auth.assert_called_with( - { - "realm": "https://auth.fakereg.com/token", - "scope": "repository:library/stuff:pull", - "service": "https://fakereg.com", - } - ) - - # verify it authed ok both times, with corresponding tokens, and that it stored the new one - sent_auth_header = responses.calls[0].request.headers.get("Authorization") - assert sent_auth_header == "Bearer some auth token" - sent_auth_header = responses.calls[1].request.headers.get("Authorization") - assert sent_auth_header == "Bearer new auth token" - assert ocireg.auth_token == "new auth token" - - -def test_hit_simple_re_auth_problems(responses): - """Bad response from the re-authentication process.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - - # set only one response, a 401 which is broken and all will end there - headers = {"Www-Authenticate": "broken header"} - responses.add(responses.GET, "https://fakereg.com/api/stuff", headers=headers, status=401) - - # try it, isolating the re-authentication (tested separately above) - expected = ( - "Bad 401 response: Bearer not found; 
headers: {.*'Www-Authenticate': 'broken header'.*}" - ) - with pytest.raises(CraftError, match=expected): - ocireg._hit("GET", "https://fakereg.com/api/stuff") - - -def test_hit_different_method(responses): - """Simple request using something else than GET.""" - # set the Registry with an initial token - ocireg = OCIRegistry("https://fakereg.com", "test-image") - ocireg.auth_token = "some auth token" - - # fake a 200 response - responses.add(responses.POST, "https://fakereg.com/api/stuff") - - # try it - response = ocireg._hit("POST", "https://fakereg.com/api/stuff") - assert response == responses.calls[0].response - - -def test_hit_including_headers(responses): - """A request including more headers.""" - # set the Registry with an initial token - ocireg = OCIRegistry("https://fakereg.com", "test-image") - ocireg.auth_token = "some auth token" - - # fake a 200 response - responses.add(responses.POST, "https://fakereg.com/api/stuff") - - # try it - response = ocireg._hit("POST", "https://fakereg.com/api/stuff", headers={"FOO": "bar"}) - assert response == responses.calls[0].response - - # check that it sent the requested header AND the automatic auth one - sent_headers = responses.calls[0].request.headers - assert sent_headers.get("FOO") == "bar" - assert sent_headers.get("Authorization") == "Bearer some auth token" - - -def test_hit_extra_parameters(responses): - """The request can include extra parameters.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - - # fake a 200 response - responses.add(responses.PUT, "https://fakereg.com/api/stuff") - - # try it - response = ocireg._hit("PUT", "https://fakereg.com/api/stuff", data=b"test-payload") - assert response == responses.calls[0].response - assert responses.calls[0].request.body == b"test-payload" - - -def test_hit_no_log(emitter, responses): - """Simple request but avoiding log.""" - # set the Registry with an initial token - ocireg = OCIRegistry("https://fakereg.com", "test-image") - ocireg.auth_token = "some auth token" - - # fake a 200 response - responses.add(responses.PUT, "https://fakereg.com/api/stuff") - - # try it - ocireg._hit("PUT", "https://fakereg.com/api/stuff", log=False) - - # nothing shown! 
- emitter.assert_interactions(None) - - -# -- tests for other OCIRegistry helpers: checkers if stuff uploaded - - -def test_ociregistry_is_manifest_uploaded(): - """Check the simple call with correct path to the generic verifier.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - with patch.object(ocireg, "_is_item_already_uploaded") as mock_verifier: - mock_verifier.return_value = "whatever" - result = ocireg.is_manifest_already_uploaded("test-reference") - assert result == "whatever" - url = "https://fakereg.com/v2/test-image/manifests/test-reference" - mock_verifier.assert_called_with(url) - - -def test_ociregistry_is_blob_uploaded(): - """Check the simple call with correct path to the generic verifier.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - with patch.object(ocireg, "_is_item_already_uploaded") as mock_verifier: - mock_verifier.return_value = "whatever" - result = ocireg.is_blob_already_uploaded("test-reference") - assert result == "whatever" - url = "https://fakereg.com/v2/test-image/blobs/test-reference" - mock_verifier.assert_called_with(url) - - -def test_ociregistry_is_item_uploaded_simple_yes(responses): - """Simple case for the item already uploaded.""" - ocireg = OCIRegistry("http://fakereg.com/", "test-image") - url = "http://fakereg.com/v2/test-image/stuff/some-reference" - responses.add(responses.HEAD, url) - - # try it - result = ocireg._is_item_already_uploaded(url) - assert result is True - - -def test_ociregistry_is_item_uploaded_simple_no(responses): - """Simple case for the item NOT already uploaded.""" - ocireg = OCIRegistry("http://fakereg.com/", "test-image") - url = "http://fakereg.com/v2/test-image/stuff/some-reference" - responses.add(responses.HEAD, url, status=404) - - # try it - result = ocireg._is_item_already_uploaded(url) - assert result is False - - -@pytest.mark.parametrize("redir_status", [302, 307]) -def test_ociregistry_is_item_uploaded_redirect(responses, redir_status): - """The verification is redirected to somewhere else.""" - ocireg = OCIRegistry("http://fakereg.com/", "test-image") - url1 = "http://fakereg.com/v2/test-image/stuff/some-reference" - url2 = "http://fakereg.com/real-check/test-image/stuff/some-reference" - responses.add(responses.HEAD, url1, status=redir_status, headers={"Location": url2}) - responses.add(responses.HEAD, url2, status=200) - - # try it - result = ocireg._is_item_already_uploaded(url1) - assert result is True - - -def test_ociregistry_is_item_uploaded_strange_response(responses, emitter): - """Unexpected response.""" - ocireg = OCIRegistry("http://fakereg.com/", "test-image") - url = "http://fakereg.com/v2/test-image/stuff/some-reference" - responses.add(responses.HEAD, url, status=400, headers={"foo": "bar"}) - - # try it - result = ocireg._is_item_already_uploaded(url) - assert result is False - expected = ( - "Bad response when checking for uploaded " - "'http://fakereg.com/v2/test-image/stuff/some-reference': 400 " - "(headers={'Content-Type': 'text/plain', 'foo': 'bar'})" - ) - emitter.assert_debug(expected) - - -# -- test for the OCIRegistry manifest upload - - -def test_ociregistry_upload_manifest_v2(responses, emitter): - """Upload a V2 manifest.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - - url = "https://fakereg.com/v2/test-image/manifests/test-reference" - responses.add(responses.PUT, url, status=201) - - # try it - raw_manifest_data = "test-manifest" - ocireg.upload_manifest(raw_manifest_data, "test-reference") - - # check logs - 
emitter.assert_progress("Uploading manifest with reference test-reference") - emitter.assert_progress("Manifest uploaded OK") - - # check header and data sent - assert responses.calls[0].request.headers["Content-Type"] == MANIFEST_V2_MIMETYPE - assert responses.calls[0].request.body == raw_manifest_data.encode("ascii") - - -# -- tests for the OCIRegistry blob upload - - -def test_ociregistry_upload_blob_complete(tmp_path, emitter, responses, monkeypatch): - """Complete upload of a binary to the registry.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response - pump_url_1 = base_url + "fakeurl-1" - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": pump_url_1, "Range": "0-0"}, - ) - - # and the intermediate ones, chained - pump_url_2 = base_url + "fakeurl-2" - pump_url_3 = base_url + "fakeurl-3" - pump_url_4 = base_url + "fakeurl-4" - responses.add(responses.PATCH, pump_url_1, status=202, headers={"Location": pump_url_2}) - responses.add(responses.PATCH, pump_url_2, status=202, headers={"Location": pump_url_3}) - responses.add(responses.PATCH, pump_url_3, status=202, headers={"Location": pump_url_4}) - - # finally, the closing url - responses.add( - responses.PUT, - base_url + "fakeurl-4&digest=test-digest", - status=201, - headers={"Docker-Content-Digest": "test-digest"}, - ) - - # prepare a fake content that will be pushed in 3 parts - monkeypatch.setattr(registry, "CHUNK_SIZE", 3) - bytes_source = tmp_path / "testfile" - bytes_source.write_text("abcdefgh") - - # call! - ocireg.upload_blob(bytes_source, 8, "test-digest") - - # check all the sent headers - expected_headers_per_request = [ - {}, # nothing special in the initial one - { - "Content-Length": "3", - "Content-Range": "0-3", - "Content-Type": OCTET_STREAM_MIMETYPE, - }, - { - "Content-Length": "3", - "Content-Range": "3-6", - "Content-Type": OCTET_STREAM_MIMETYPE, - }, - { - "Content-Length": "2", - "Content-Range": "6-8", - "Content-Type": OCTET_STREAM_MIMETYPE, - }, - {"Content-Length": "0", "Connection": "close"}, # closing - ] - for idx, expected_headers in enumerate(expected_headers_per_request): - sent_headers = responses.calls[idx].request.headers - for key, value in expected_headers.items(): - assert sent_headers.get(key) == value - - emitter.assert_interactions( - [ - call("progress", "Getting URL to push the blob"), - call( - "trace", - "Hitting the registry: POST https://fakereg.com/v2/test-image/blobs/uploads/", - ), - call("progress", "Got upload URL ok with range 0-0"), - call("progress_bar", "Uploading...", 8), - call("advance", 3), - call("advance", 3), - call("advance", 2), - call("progress", "Closing the upload"), - call( - "trace", - ( - "Hitting the registry: PUT " - "https://fakereg.com/v2/test-image/fakeurl-4&digest=test-digest" - ), - ), - call("progress", "Upload finished OK"), - ] - ) - - -def test_ociregistry_upload_blob_bad_initial_response(responses): - """Bad initial response when starting to upload.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response with problems - responses.add(responses.POST, base_url + "blobs/uploads/", status=500) - - # call! 
- msg = r"Wrong status code from server \(expected=202, got=500\).*" - with pytest.raises(CraftError, match=msg): - ocireg.upload_blob("test-filepath", 8, "test-digest") - - -def test_ociregistry_upload_blob_bad_upload_range(responses): - """Received a broken range info.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response with problems - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": "test-next-url", "Range": "9-9"}, - ) - - # call! - with pytest.raises(CraftError) as cm: - ocireg.upload_blob("test-filepath", 8, "test-digest") - error = cm.value - assert str(error) == "Server error: bad range received" - assert error.details == "Range='9-9'" - - -def test_ociregistry_upload_blob_resumed(tmp_path, emitter, responses): - """The upload is resumed after server indication to do so.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response, indicating that the store has already the first 5 bytes - pump_url_1 = base_url + "fakeurl-1" - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": pump_url_1, "Range": "0-4"}, - ) # has bytes in position 0, 1, 2, 3 & 4 - - # and the intermediate one - pump_url_2 = base_url + "fakeurl-2" - responses.add(responses.PATCH, pump_url_1, status=202, headers={"Location": pump_url_2}) - - # finally, the closing url - responses.add( - responses.PUT, - base_url + "fakeurl-2&digest=test-digest", - status=201, - headers={"Docker-Content-Digest": "test-digest"}, - ) - - # prepare a fake content - bytes_source = tmp_path / "testfile" - bytes_source.write_text("abcdefgh") - - # call! 
- ocireg.upload_blob(bytes_source, 8, "test-digest") - - # check all the sent headers - expected_headers_per_request = [ - {}, # nothing special in the initial one - { - "Content-Length": "3", - "Content-Range": "5-8", - "Content-Type": OCTET_STREAM_MIMETYPE, - }, - {"Content-Length": "0", "Connection": "close"}, # closing - ] - for idx, expected_headers in enumerate(expected_headers_per_request): - sent_headers = responses.calls[idx].request.headers - for key, value in expected_headers.items(): - assert sent_headers.get(key) == value - - emitter.assert_interactions( - [ - call("progress", "Getting URL to push the blob"), - call( - "trace", - "Hitting the registry: POST https://fakereg.com/v2/test-image/blobs/uploads/", - ), - call("progress", "Got upload URL ok with range 0-4"), - call("progress_bar", "Uploading...", 8), - call("advance", 5), - call("advance", 3), - call("progress", "Closing the upload"), - call( - "trace", - ( - "Hitting the registry: PUT " - "https://fakereg.com/v2/test-image/fakeurl-2&digest=test-digest" - ), - ), - call("progress", "Upload finished OK"), - ] - ) - - -def test_ociregistry_upload_blob_bad_response_middle(tmp_path, responses, monkeypatch): - """Bad response from the server when pumping bytes.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response - pump_url_1 = base_url + "fakeurl-1" - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": pump_url_1, "Range": "0-0"}, - ) - - # and the intermediate ones, chained, with a crash - pump_url_2 = base_url + "fakeurl-2" - responses.add(responses.PATCH, pump_url_1, status=202, headers={"Location": pump_url_2}) - responses.add(responses.PATCH, pump_url_2, status=504) - - # prepare a fake content that will be pushed in 3 parts - monkeypatch.setattr(registry, "CHUNK_SIZE", 3) - bytes_source = tmp_path / "testfile" - bytes_source.write_text("abcdefgh") - - # call! - msg = r"Wrong status code from server \(expected=202, got=504\).*" - with pytest.raises(CraftError, match=msg): - ocireg.upload_blob(bytes_source, 8, "test-digest") - - -def test_ociregistry_upload_blob_bad_response_closing(tmp_path, responses): - """Bad response from the server when closing the upload.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response - pump_url_1 = base_url + "fakeurl-1" - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": pump_url_1, "Range": "0-0"}, - ) - - # and the intermediate one - pump_url_2 = base_url + "fakeurl-2" - responses.add(responses.PATCH, pump_url_1, status=202, headers={"Location": pump_url_2}) - - # finally, the closing url, crashing - responses.add(responses.PUT, base_url + "fakeurl-2&digest=test-digest", status=502) - - # prepare a fake content - bytes_source = tmp_path / "testfile" - bytes_source.write_text("abcdefgh") - - # call! 
- msg = r"Wrong status code from server \(expected=201, got=502\).*" - with pytest.raises(CraftError, match=msg): - ocireg.upload_blob(bytes_source, 8, "test-digest") - - -def test_ociregistry_upload_blob_bad_final_digest(tmp_path, responses): - """Bad digest from server after closing the upload.""" - ocireg = OCIRegistry("https://fakereg.com", "test-image") - base_url = "https://fakereg.com/v2/test-image/" - - # fake the first initial response - pump_url_1 = base_url + "fakeurl-1" - responses.add( - responses.POST, - base_url + "blobs/uploads/", - status=202, - headers={"Location": pump_url_1, "Range": "0-0"}, - ) - - # and the intermediate one - pump_url_2 = base_url + "fakeurl-2" - responses.add(responses.PATCH, pump_url_1, status=202, headers={"Location": pump_url_2}) - - # finally, the closing url, bad digest - responses.add( - responses.PUT, - base_url + "fakeurl-2&digest=test-digest", - status=201, - headers={"Docker-Content-Digest": "somethingelse"}, - ) - - # prepare a fake content - bytes_source = tmp_path / "testfile" - bytes_source.write_text("abcdefgh") - - # call! - msg = "Server error: the upload is corrupted" - with pytest.raises(CraftError, match=msg): - ocireg.upload_blob(bytes_source, 8, "test-digest") - - -# -- tests for the ImageHandler helpers and functionalities - - -def test_localdockerinterface_get_info_by_id_ok(responses, emitter): - """Get image info ok.""" - test_image_info = {"some": "stuff"} - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/test-id/json", - json=test_image_info, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_id("test-id") - assert resp == test_image_info - - emitter.assert_interactions(None) - - -def test_localdockerinterface_get_info_by_id_not_found(responses, emitter): - """Get image info for something that is not there.""" - # return 404, which means that the image was not found - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/test-id/json", - status=404, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_id("test-id") - assert resp is None - - emitter.assert_interactions(None) - - -def test_localdockerinterface_get_info_by_id_bad_response(responses, emitter): - """Docker answered badly when checking for the image.""" - # weird dockerd behaviour - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/test-id/json", - status=500, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_id("test-id") - assert resp is None - - emitter.assert_debug("Bad response when validating local image: 500") - - -def test_localdockerinterface_get_info_by_id_disconnected(emitter, responses): - """No daemon to talk to (see responses used as fixture but no listening).""" - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_id("test-id") - assert resp is None - - emitter.assert_debug( - "Cannot connect to /var/run/docker.sock , please ensure dockerd is running." 
- ) - - -def test_localdockerinterface_get_info_by_digest_ok(responses, emitter): - """Get image info ok.""" - test_image_info_1 = {"some": "stuff", "RepoDigests": ["name @ sha256:test-digest", "other"]} - test_image_info_2 = {"some": "stuff", "RepoDigests": ["foo", "bar"]} - test_search_respoonse = [test_image_info_1, test_image_info_2] - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/json", - json=test_search_respoonse, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_digest("sha256:test-digest") - assert resp == test_image_info_1 - - emitter.assert_interactions(None) - - -def test_localdockerinterface_get_info_by_digest_not_found(responses, emitter): - """Get image info for something that is not there.""" - test_image_info_1 = {"some": "stuff", "RepoDigests": ["other"]} - test_image_info_2 = {"some": "stuff", "RepoDigests": ["foo", "bar"]} - test_search_respoonse = [test_image_info_1, test_image_info_2] - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/json", - json=test_search_respoonse, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_digest("sha256:test-digest") - assert resp is None - - emitter.assert_interactions(None) - - -def test_localdockerinterface_get_info_by_digest_none_digest(responses, emitter): - """Get image info for something that is not there.""" - test_image_info_1 = {"some": "stuff", "RepoDigests": None} - test_search_respoonse = [test_image_info_1] - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/json", - json=test_search_respoonse, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_digest("sha256:test-digest") - assert resp is None - - emitter.assert_interactions(None) - - -def test_localdockerinterface_get_info_by_digest_bad_response(responses, emitter): - """Docker answered badly when checking for the image.""" - # weird dockerd behaviour - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/json", - status=500, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_digest("sha256:test-digest") - assert resp is None - - emitter.assert_debug("Bad response when validating local image: 500") - - -def test_localdockerinterface_get_info_by_digest_disconnected(emitter, responses): - """No daemon to talk to (see responses used as fixture but no listening).""" - ldi = LocalDockerdInterface() - resp = ldi.get_image_info_from_digest("sha256:test-digest") - assert resp is None - - emitter.assert_debug( - "Cannot connect to /var/run/docker.sock , please ensure dockerd is running." 
- ) - - -def test_localdockerinterface_get_streamed_content(responses): - """Get the content streamed.""" - - class AuditableBufferedReader(io.BufferedReader): - """BufferedReader that records the size of each reading.""" - - _test_read_chunks = [] - - def read(self, size): - self._test_read_chunks.append(size) - return super().read(size) - - test_content = AuditableBufferedReader(io.BytesIO(b"123456789")) - responses.add( - responses.GET, - LocalDockerdInterface.dockerd_socket_baseurl + "/images/test-id/get", - body=test_content, - ) - ldi = LocalDockerdInterface() - resp = ldi.get_streamed_image_content("test-id") - assert test_content._test_read_chunks == [] - - chunk_size = 5 - streamed = resp.iter_content(chunk_size) - assert next(streamed) == b"12345" - assert test_content._test_read_chunks == [chunk_size] - assert next(streamed) == b"6789" - assert test_content._test_read_chunks == [chunk_size, chunk_size] - with pytest.raises(StopIteration): - next(streamed) - - -class FakeRegistry: - """A fake registry to mimic behaviour of the real one and record actions.""" - - def __init__(self, image_name=None): - self.image_name = image_name - self.stored_manifests = {} - self.stored_blobs = {} - - def is_manifest_already_uploaded(self, reference): - return reference in self.stored_manifests - - def upload_manifest(self, content, reference, multiple_manifest=False): - self.stored_manifests[reference] = (content, multiple_manifest) - - def get_manifest(self, reference): - return self.stored_manifests[reference] - - def is_blob_already_uploaded(self, reference): - return reference in self.stored_blobs - - def upload_blob(self, filepath, size, digest): - self.stored_blobs[digest] = (pathlib.Path(filepath).read_bytes(), size) - - -class FakeDockerd: - """A fake dockerd interface to mimic behaviour of the real one.""" - - def __init__(self, image_id, image_info, image_content): - self.image_info = image_info - self.image_content = image_content - self.used_id = image_id - - def get_streamed_image_content(self, image_id): - assert image_id == self.used_id - - class FakeResponse: - def __init__(self, content): - self.content = io.BytesIO(content) - - def iter_content(self, chunk_size): - while True: - chunk = self.content.read(chunk_size) - if not chunk: - break - yield chunk - - return FakeResponse(self.image_content) - - -def test_imagehandler_check_in_registry_yes(): - """Check if an image is in the registry and find it.""" - fake_registry = FakeRegistry() - fake_registry.stored_manifests["test-reference"] = ( - None, - "test-digest", - "test-manifest", - ) - - im = ImageHandler(fake_registry) - result = im.check_in_registry("test-reference") - assert result is True - - -def test_imagehandler_check_in_registry_no(): - """Check if an image is in the registry and don't find it.""" - fake_registry = FakeRegistry() - - im = ImageHandler(fake_registry) - result = im.check_in_registry("test-reference") - assert result is False - - -def test_imagehandler_extract_file_simple(tmp_path, emitter): - """Extract a file from the tarfile and gets its info.""" - # create a tar file with one file inside - test_content = b"test content for the sample file" - sample_file = tmp_path / "testfile.txt" - sample_file.write_bytes(test_content) - tar_filepath = tmp_path / "testfile.tar" - with tarfile.open(tar_filepath, "w") as tar: - tar.add(sample_file, "testfile.txt") - - im = ImageHandler("registry") - with tarfile.open(tar_filepath, "r") as tar: - tmp_filepath, size, digest = im._extract_file(tar, "testfile.txt") - - 
assert size == len(test_content) - assert digest == "sha256:" + hashlib.sha256(test_content).hexdigest() - assert pathlib.Path(tmp_filepath).read_bytes() == test_content - - emitter.assert_progress("Extracting file 'testfile.txt' from local tar (compress=False)") - - -def test_imagehandler_extract_file_compressed_ok(tmp_path, emitter): - """Extract a file from the tarfile and gets its info after compressed.""" - # create a tar file with one file inside - test_content = b"test content for the sample file" - sample_file = tmp_path / "testfile.txt" - sample_file.write_bytes(test_content) - tar_filepath = tmp_path / "testfile.tar" - with tarfile.open(tar_filepath, "w") as tar: - tar.add(sample_file, "testfile.txt") - - im = ImageHandler("registry") - with tarfile.open(tar_filepath, "r") as tar: - tmp_filepath, size, digest = im._extract_file(tar, "testfile.txt", compress=True) - - compressed_content = pathlib.Path(tmp_filepath).read_bytes() - assert size == len(compressed_content) - assert digest == "sha256:" + hashlib.sha256(compressed_content).hexdigest() - assert gzip.decompress(compressed_content) == test_content - - emitter.assert_progress("Extracting file 'testfile.txt' from local tar (compress=True)") - - -def test_imagehandler_extract_file_compressed_deterministic(tmp_path, emitter): - """Different compressions for the same file give the exact same data.""" - # create a tar file with one file inside - test_content = b"test content for the sample file" - sample_file = tmp_path / "testfile.txt" - sample_file.write_bytes(test_content) - tar_filepath = tmp_path / "testfile.tar" - with tarfile.open(tar_filepath, "w") as tar: - tar.add(sample_file, "testfile.txt") - - im = ImageHandler("registry") - with tarfile.open(tar_filepath, "r") as tar: - _, _, digest1 = im._extract_file(tar, "testfile.txt", compress=True) - _, _, digest2 = im._extract_file(tar, "testfile.txt", compress=True) - - assert digest1 == digest2 - - -def test_imagehandler_uploadblob_first_time(emitter, tmp_path): - """Upload a blob for the first time.""" - tmp_file = tmp_path / "somebinary.dat" - tmp_file.write_text("testcontent") - - fake_registry = FakeRegistry() - - im = ImageHandler(fake_registry) - im._upload_blob(str(tmp_file), 20, "superdigest") - - # check it was uploaded - assert fake_registry.stored_blobs["superdigest"] == (b"testcontent", 20) - - # verify the file is cleaned - assert not tmp_file.exists() - - emitter.assert_interactions(None) - - -def test_imagehandler_uploadblob_duplicated(emitter, tmp_path): - """Upload a blob that was already there.""" - tmp_file = tmp_path / "somebinary.dat" - tmp_file.write_text("testcontent") - - fake_registry = FakeRegistry() - # add the entry for the blob, the value is not important - fake_registry.stored_blobs["superdigest"] = None - - im = ImageHandler(fake_registry) - im._upload_blob(str(tmp_file), 20, "superdigest") - - # check it was NOT uploaded again - assert fake_registry.stored_blobs["superdigest"] is None - - # verify the file is cleaned - assert not tmp_file.exists() - - emitter.assert_progress("Blob was already uploaded") - - -@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") -def test_imagehandler_uploadfromlocal_complete(emitter, tmp_path, responses, monkeypatch): - """Complete process of uploading a local image.""" - # fake an image in disk (a tar file with config, layers, and a manifest).""" - test_tar_image = tmp_path / "test-image.tar" - test_tar_config_content = b"fake config for the image" - test_tar_layer1_content = 
b"fake first layer content for the image" - test_tar_layer2_content = b"fake second layer content for the image" - test_manifest_content = json.dumps( - [ - { - "Config": const.JUJU_CONFIG_FILENAME, - "Layers": ["layer1.bin", "layer2.bin"], - } - ] - ).encode("ascii") - tar_file = tarfile.TarFile(test_tar_image, "w") - tar_content = [ - ("manifest.json", test_manifest_content), - (const.JUJU_CONFIG_FILENAME, test_tar_config_content), - ("layer1.bin", test_tar_layer1_content), - ("layer2.bin", test_tar_layer2_content), - ] - for name, content in tar_content: - ti = tarfile.TarInfo(name) - ti.size = len(content) - tar_file.addfile(ti, fileobj=io.BytesIO(content)) - tar_file.close() - - # prepare the image info - image_size = test_tar_image.stat().st_size - image_id = "test-image-id" - image_info = {"Size": image_size, "Id": image_id, "foobar": "etc"} - fakedockerd = FakeDockerd(image_id, image_info, test_tar_image.read_bytes()) - monkeypatch.setattr(registry, "LocalDockerdInterface", lambda: fakedockerd) - - # ensure two reads from that image, so we can properly test progress - image_read_from_dockerd_size_1 = int(image_size * 0.7) - image_read_from_dockerd_size_2 = image_size - image_read_from_dockerd_size_1 - monkeypatch.setattr(registry, "CHUNK_SIZE", image_read_from_dockerd_size_1) - - fake_registry = FakeRegistry() - im = ImageHandler(fake_registry) - main_call_result = im.upload_from_local(image_info) - - # check the uploaded blobs: first the config (as is), then the layers (compressed) - ( - uploaded_config, - uploaded_layer1, - uploaded_layer2, - ) = fake_registry.stored_blobs.items() - - (u_config_digest, (u_config_content, u_config_size)) = uploaded_config - assert u_config_content == test_tar_config_content - assert u_config_size == len(u_config_content) - assert u_config_digest == "sha256:" + hashlib.sha256(u_config_content).hexdigest() - - (u_layer1_digest, (u_layer1_content, u_layer1_size)) = uploaded_layer1 - assert gzip.decompress(u_layer1_content) == test_tar_layer1_content - assert u_layer1_size == len(u_layer1_content) - assert u_layer1_digest == "sha256:" + hashlib.sha256(u_layer1_content).hexdigest() - - (u_layer2_digest, (u_layer2_content, u_layer2_size)) = uploaded_layer2 - assert gzip.decompress(u_layer2_content) == test_tar_layer2_content - assert u_layer2_size == len(u_layer2_content) - assert u_layer2_digest == "sha256:" + hashlib.sha256(u_layer2_content).hexdigest() - - # check the uploaded manifest metadata and real content - (uploaded_manifest,) = fake_registry.stored_manifests.items() - (u_manifest_digest, (u_manifest_content, u_manifest_multiple)) = uploaded_manifest - assert ( - u_manifest_digest - == "sha256:" + hashlib.sha256(u_manifest_content.encode("utf8")).hexdigest() - ) - assert u_manifest_multiple is False - - # the response from the function we're testing is the final remote digest - assert main_call_result == u_manifest_digest - - u_manifest = json.loads(u_manifest_content) - assert u_manifest["mediaType"] == MANIFEST_V2_MIMETYPE - assert u_manifest["schemaVersion"] == 2 - - assert u_manifest["config"] == { - "digest": u_config_digest, - "mediaType": CONFIG_MIMETYPE, - "size": u_config_size, - } - - assert u_manifest["layers"] == [ - { - "digest": u_layer1_digest, - "mediaType": LAYER_MIMETYPE, - "size": u_layer1_size, - }, - { - "digest": u_layer2_digest, - "mediaType": LAYER_MIMETYPE, - "size": u_layer2_size, - }, - ] - - # check the output logs - emitter.assert_interactions( - [ - call("progress", f"Getting the image from the local repo; 
size={image_size}"), - call("progress_bar", "Reading image...", image_size), - call("advance", image_read_from_dockerd_size_1), - call("advance", image_read_from_dockerd_size_2), - call("progress", "Extracting file 'config.yaml' from local tar (compress=False)"), - call( - "progress", - f"Uploading config blob, size={u_config_size}, digest={u_config_digest}", - ), - call("progress", "Extracting file 'layer1.bin' from local tar (compress=True)"), - call( - "progress", - f"Uploading layer blob 1/2, size={u_layer1_size}, digest={u_layer1_digest}", - ), - call("progress", "Extracting file 'layer2.bin' from local tar (compress=True)"), - call( - "progress", - f"Uploading layer blob 2/2, size={u_layer2_size}, digest={u_layer2_digest}", - ), - ] - ) - - -@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") -def test_imagehandler_uploadfromlocal_no_config(emitter, tmp_path, monkeypatch): - """Particular case of a manifest without config.""" - # fake an image in disk (a tar file with NO config, a layer, and a manifest).""" - test_tar_image = tmp_path / "test-image.tar" - test_tar_layer_content = b"fake layer content for the image" - test_manifest_content = json.dumps( - [ - { - "Layers": ["layer.bin"], - } - ] - ).encode("ascii") - tar_file = tarfile.TarFile(test_tar_image, "w") - tar_content = [ - ("manifest.json", test_manifest_content), - ("layer.bin", test_tar_layer_content), - ] - for name, content in tar_content: - ti = tarfile.TarInfo(name) - ti.size = len(content) - tar_file.addfile(ti, fileobj=io.BytesIO(content)) - tar_file.close() - - # return 200 with the image info - image_size = test_tar_image.stat().st_size - image_id = "test-image-id" - image_info = {"Size": image_size, "Id": image_id, "foobar": "etc"} - fakedockerd = FakeDockerd(image_id, image_info, test_tar_image.read_bytes()) - monkeypatch.setattr(registry, "LocalDockerdInterface", lambda: fakedockerd) - - fake_registry = FakeRegistry() - im = ImageHandler(fake_registry) - main_call_result = im.upload_from_local(image_info) - - # check the uploaded blob: just the compressed layer - (uploaded_layer,) = fake_registry.stored_blobs.items() - - (u_layer_digest, (u_layer_content, u_layer_size)) = uploaded_layer - assert gzip.decompress(u_layer_content) == test_tar_layer_content - assert u_layer_size == len(u_layer_content) - assert u_layer_digest == "sha256:" + hashlib.sha256(u_layer_content).hexdigest() - - # check the uploaded manifest metadata and real content - (uploaded_manifest,) = fake_registry.stored_manifests.items() - (u_manifest_digest, (u_manifest_content, u_manifest_multiple)) = uploaded_manifest - assert ( - u_manifest_digest - == "sha256:" + hashlib.sha256(u_manifest_content.encode("utf8")).hexdigest() - ) - assert u_manifest_multiple is False - - # the response from the function we're testing is the final remote digest - assert main_call_result == u_manifest_digest - - u_manifest = json.loads(u_manifest_content) - assert u_manifest["mediaType"] == MANIFEST_V2_MIMETYPE - assert u_manifest["schemaVersion"] == 2 - - assert "config" not in u_manifest - assert u_manifest["layers"] == [ - { - "digest": u_layer_digest, - "mediaType": LAYER_MIMETYPE, - "size": u_layer_size, - } - ] - - # check the output logs - emitter.assert_interactions( - [ - call("progress", f"Getting the image from the local repo; size={image_size}"), - call("progress_bar", "Reading image...", image_size), - call("advance", image_size), - call("progress", "Extracting file 'layer.bin' from local tar (compress=True)"), - call( 
- "progress", - f"Uploading layer blob 1/1, size={u_layer_size}, digest={u_layer_digest}", - ), - ] - ) diff --git a/tests/conftest.py b/tests/conftest.py index 1eeecf33d..455b71709 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,6 +22,7 @@ import tempfile import types from collections.abc import Iterator +from typing import Any from unittest import mock import craft_parts @@ -39,24 +40,39 @@ @pytest.fixture -def simple_charm(): - return project.BasesCharm( - type="charm", - name="charmy-mccharmface", - summary="Charmy!", - description="Very charming!", - bases=[ - { - "build-on": [ - { - "name": "ubuntu", - "channel": "22.04", - "architectures": [util.get_host_architecture()], - } - ], - "run-on": [{"name": "ubuntu", "channel": "22.04", "architectures": ["arm64"]}], - } - ], +def basic_charm_dict() -> dict[str, Any]: + return { + "type": "charm", + "name": "charmy-mccharmface", + "summary": "Charmy!", + "description": "Very charming!", + } + + +@pytest.fixture +def simple_charm(basic_charm_dict: dict[str, Any]): + return project.BasesCharm.unmarshal( + basic_charm_dict + | { + "bases": [ + { + "build-on": [ + { + "name": "ubuntu", + "channel": "22.04", + "architectures": [util.get_host_architecture()], + } + ], + "run-on": [ + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["arm64"], + } + ], + } + ], + } ) @@ -99,6 +115,10 @@ def service_factory( cache_dir=pathlib.Path("/cache"), build_plan=default_build_plan, ) + factory.update_kwargs( + "charm_libs", + project_dir=fake_project_dir, + ) factory.project = simple_charm @@ -109,16 +129,19 @@ def service_factory( @pytest.fixture -def default_build_plan(): +def default_build_info() -> models.BuildInfo: arch = util.get_host_architecture() - return [ - models.BuildInfo( - base=bases.BaseName("ubuntu", "22.04"), - build_on=arch, - build_for="arm64", - platform="distro-1-test64", - ) - ] + return models.BuildInfo( + base=bases.BaseName("ubuntu", "22.04"), + build_on=arch, + build_for="arm64", + platform="distro-1-test64", + ) + + +@pytest.fixture +def default_build_plan(default_build_info: models.BuildInfo): + return [default_build_info] @pytest.fixture @@ -300,7 +323,9 @@ def helper(*match_lines): for match_line in match_lines: if match_line not in printed_lines: printed_repr = "\n".join(map(repr, printed_lines)) - pytest.fail(f"Line {match_line!r} not found in the output found:\n{printed_repr}") + pytest.fail( + f"Line {match_line!r} not found in the output found:\n{printed_repr}" + ) return helper @@ -348,7 +373,7 @@ def charm_plugin(tmp_path): "charm-python-packages": ["pkg3", "pkg4"], "charm-requirements": requirement_files, } - plugin_properties = parts.CharmPluginProperties.unmarshal(spec) + plugin_properties = charmcraft.parts.plugins.CharmPluginProperties.unmarshal(spec) part_spec = plugins.extract_part_properties(spec, plugin_name="charm") part = craft_parts.Part( "foo", part_spec, project_dirs=project_dirs, plugin_properties=plugin_properties @@ -360,7 +385,9 @@ def charm_plugin(tmp_path): ) part_info = craft_parts.PartInfo(project_info=project_info, part=part) - return plugins.get_plugin(part=part, part_info=part_info, properties=plugin_properties) + return plugins.get_plugin( + part=part, part_info=part_info, properties=plugin_properties + ) @pytest.fixture @@ -370,7 +397,7 @@ def bundle_plugin(tmp_path): "plugin": "bundle", "source": str(tmp_path), } - plugin_properties = charmcraft.parts.bundle.BundlePluginProperties.unmarshal(spec) + plugin_properties = 
charmcraft.parts.plugins.BundlePluginProperties.unmarshal(spec) part_spec = plugins.extract_part_properties(spec, plugin_name="bundle") part = craft_parts.Part( "foo", part_spec, project_dirs=project_dirs, plugin_properties=plugin_properties @@ -382,4 +409,54 @@ def bundle_plugin(tmp_path): ) part_info = craft_parts.PartInfo(project_info=project_info, part=part) - return plugins.get_plugin(part=part, part_info=part_info, properties=plugin_properties) + return plugins.get_plugin( + part=part, part_info=part_info, properties=plugin_properties + ) + + +@pytest.fixture +def poetry_plugin(tmp_path: pathlib.Path): + project_dirs = craft_parts.ProjectDirs(work_dir=tmp_path) + spec = { + "plugin": "poetry", + "source": str(tmp_path), + } + plugin_properties = parts.plugins.PoetryPluginProperties.unmarshal(spec) + part_spec = craft_parts.plugins.extract_part_properties(spec, plugin_name="poetry") + part = craft_parts.Part( + "foo", part_spec, project_dirs=project_dirs, plugin_properties=plugin_properties + ) + project_info = craft_parts.ProjectInfo( + application_name="test", + project_dirs=project_dirs, + cache_dir=tmp_path, + ) + part_info = craft_parts.PartInfo(project_info=project_info, part=part) + + return craft_parts.plugins.get_plugin( + part=part, part_info=part_info, properties=plugin_properties + ) + + +@pytest.fixture +def python_plugin(tmp_path: pathlib.Path): + project_dirs = craft_parts.ProjectDirs(work_dir=tmp_path) + spec = { + "plugin": "python", + "source": str(tmp_path), + } + plugin_properties = parts.plugins.PythonPluginProperties.unmarshal(spec) + part_spec = craft_parts.plugins.extract_part_properties(spec, plugin_name="python") + part = craft_parts.Part( + "foo", part_spec, project_dirs=project_dirs, plugin_properties=plugin_properties + ) + project_info = craft_parts.ProjectInfo( + application_name="test", + project_dirs=project_dirs, + cache_dir=tmp_path, + ) + part_info = craft_parts.PartInfo(project_info=project_info, part=part) + + return craft_parts.plugins.get_plugin( + part=part, part_info=part_info, properties=plugin_properties + ) diff --git a/tests/extensions/test_app.py b/tests/extensions/test_app.py index d88a66c7a..39de39c60 100644 --- a/tests/extensions/test_app.py +++ b/tests/extensions/test_app.py @@ -19,6 +19,7 @@ from charmcraft.extensions import apply_extensions from charmcraft.extensions.app import ( DjangoFramework, + FastAPIFramework, FlaskFramework, GoFramework, ) @@ -59,7 +60,7 @@ def flask_input_yaml_fixture(): {"lib": "traefik_k8s.ingress", "version": "2"}, {"lib": "observability_libs.juju_topology", "version": "0"}, {"lib": "grafana_k8s.grafana_dashboard", "version": "0"}, - {"lib": "loki_k8s.loki_push_api", "version": "0"}, + {"lib": "loki_k8s.loki_push_api", "version": "1"}, {"lib": "data_platform_libs.data_interfaces", "version": "0"}, {"lib": "prometheus_k8s.prometheus_scrape", "version": "0"}, {"lib": "redis_k8s.redis", "version": "0"}, @@ -113,7 +114,7 @@ def flask_input_yaml_fixture(): }, "extensions": ["django-framework"], }, - True, + False, { "actions": DjangoFramework.actions, "assumes": ["k8s-api"], @@ -135,7 +136,7 @@ def flask_input_yaml_fixture(): {"lib": "traefik_k8s.ingress", "version": "2"}, {"lib": "observability_libs.juju_topology", "version": "0"}, {"lib": "grafana_k8s.grafana_dashboard", "version": "0"}, - {"lib": "loki_k8s.loki_push_api", "version": "0"}, + {"lib": "loki_k8s.loki_push_api", "version": "1"}, {"lib": "data_platform_libs.data_interfaces", "version": "0"}, {"lib": "prometheus_k8s.prometheus_scrape", "version": 
"0"}, {"lib": "redis_k8s.redis", "version": "0"}, @@ -178,14 +179,20 @@ def flask_input_yaml_fixture(): "name": "test-go", "summary": "test summary", "description": "test description", - "bases": [{"name": "ubuntu", "channel": "24.04"}], + "base": "ubuntu@24.04", + "platforms": { + "amd64": None, + }, "extensions": ["go-framework"], }, True, { "actions": GoFramework.actions, "assumes": ["k8s-api"], - "bases": [{"channel": "24.04", "name": "ubuntu"}], + "base": "ubuntu@24.04", + "platforms": { + "amd64": None, + }, "containers": { "app": {"resource": "app-image"}, }, @@ -195,7 +202,7 @@ def flask_input_yaml_fixture(): {"lib": "traefik_k8s.ingress", "version": "2"}, {"lib": "observability_libs.juju_topology", "version": "0"}, {"lib": "grafana_k8s.grafana_dashboard", "version": "0"}, - {"lib": "loki_k8s.loki_push_api", "version": "0"}, + {"lib": "loki_k8s.loki_push_api", "version": "1"}, {"lib": "data_platform_libs.data_interfaces", "version": "0"}, {"lib": "prometheus_k8s.prometheus_scrape", "version": "0"}, {"lib": "redis_k8s.redis", "version": "0"}, @@ -232,9 +239,77 @@ def flask_input_yaml_fixture(): "type": "charm", }, ), + ( + { + "type": "charm", + "name": "test-fastapi", + "summary": "test summary", + "description": "test description", + "base": "ubuntu@24.04", + "platforms": { + "amd64": None, + }, + "extensions": ["fastapi-framework"], + }, + True, + { + "actions": FastAPIFramework.actions, + "assumes": ["k8s-api"], + "base": "ubuntu@24.04", + "platforms": { + "amd64": None, + }, + "containers": { + "app": {"resource": "app-image"}, + }, + "description": "test description", + "name": "test-fastapi", + "charm-libs": [ + {"lib": "traefik_k8s.ingress", "version": "2"}, + {"lib": "observability_libs.juju_topology", "version": "0"}, + {"lib": "grafana_k8s.grafana_dashboard", "version": "0"}, + {"lib": "loki_k8s.loki_push_api", "version": "1"}, + {"lib": "data_platform_libs.data_interfaces", "version": "0"}, + {"lib": "prometheus_k8s.prometheus_scrape", "version": "0"}, + {"lib": "redis_k8s.redis", "version": "0"}, + {"lib": "data_platform_libs.s3", "version": "0"}, + {"lib": "saml_integrator.saml", "version": "0"}, + ], + "config": { + "options": {**FastAPIFramework.options}, + }, + "parts": { + "charm": { + "plugin": "charm", + "source": ".", + "build-snaps": ["rustup"], + "override-build": "rustup default stable\ncraftctl default", + } + }, + "peers": {"secret-storage": {"interface": "secret-storage"}}, + "provides": { + "metrics-endpoint": {"interface": "prometheus_scrape"}, + "grafana-dashboard": {"interface": "grafana_dashboard"}, + }, + "requires": { + "logging": {"interface": "loki_push_api"}, + "ingress": {"interface": "ingress", "limit": 1}, + }, + "resources": { + "app-image": { + "description": "fastapi application image.", + "type": "oci-image", + }, + }, + "summary": "test summary", + "type": "charm", + }, + ), ], ) -def test_apply_extensions_correct(monkeypatch, experimental, tmp_path, input_yaml, expected): +def test_apply_extensions_correct( + monkeypatch, experimental, tmp_path, input_yaml, expected +): if experimental: monkeypatch.setenv("CHARMCRAFT_ENABLE_EXPERIMENTAL_EXTENSIONS", "1") diff --git a/tests/extensions/test_extensions.py b/tests/extensions/test_extensions.py index 794b09d98..8a6770cbd 100644 --- a/tests/extensions/test_extensions.py +++ b/tests/extensions/test_extensions.py @@ -128,8 +128,12 @@ def test_experimental_no_env(fake_extensions, tmp_path): "description": "test description", "bases": [ { - "build-on": [{"name": "ubuntu", "channel": "20.04", 
"architectures": ["amd64"]}], - "run-on": [{"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]}], + "build-on": [ + {"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]} + ], + "run-on": [ + {"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]} + ], } ], "extensions": [ExperimentalExtension.name], @@ -149,8 +153,12 @@ def test_wrong_base(fake_extensions, tmp_path): "description": "test description", "bases": [ { - "build-on": [{"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]}], - "run-on": [{"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]}], + "build-on": [ + {"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]} + ], + "run-on": [ + {"name": "ubuntu", "channel": "20.04", "architectures": ["amd64"]} + ], } ], "extensions": [FakeExtension.name], @@ -188,7 +196,13 @@ def test_apply_extensions(fake_extensions, tmp_path): "description": "test description", "bases": [{"name": "ubuntu", "channel": "22.04"}], "extensions": [FullExtension.name], - "parts": {"my-part": {"plugin": "nil", "source": None, "stage-packages": ["old-package"]}}, + "parts": { + "my-part": { + "plugin": "nil", + "source": None, + "stage-packages": ["old-package"], + } + }, } applied = extensions.apply_extensions(tmp_path, charmcraft_config) diff --git a/tests/extensions/test_registry.py b/tests/extensions/test_registry.py index b4cf9f303..50647b4ac 100644 --- a/tests/extensions/test_registry.py +++ b/tests/extensions/test_registry.py @@ -85,7 +85,11 @@ def test_get_extension_class_error(fake_extensions): def test_get_extensions(fake_extensions): assert extensions.get_extensions() == [ - {"name": "fake-extension-1", "bases": [("ubuntu@22.04")], "experimental_bases": []}, + { + "name": "fake-extension-1", + "bases": [("ubuntu@22.04")], + "experimental_bases": [], + }, { "name": "fake-extension-2", "bases": [("ubuntu@22.04")], diff --git a/tests/integration/commands/test_analyse.py b/tests/integration/commands/test_analyse.py index 36e00fc49..6c4474c6f 100644 --- a/tests/integration/commands/test_analyse.py +++ b/tests/integration/commands/test_analyse.py @@ -1,4 +1,4 @@ -# Copyright 2021-2022 Canonical Ltd. +# Copyright 2021-2024 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -14,17 +14,18 @@ # # For further info, check https://github.com/canonical/charmcraft + import json import sys import zipfile from argparse import ArgumentParser, Namespace +from pathlib import Path import pytest from craft_cli import CraftError from charmcraft import linters from charmcraft.application.commands.analyse import Analyse -from charmcraft.cmdbase import JSON_FORMAT from charmcraft.models.lint import LintResult @@ -54,15 +55,17 @@ def test_expanded_charm_permissions(config, fake_project_dir, monkeypatch, modeb @pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") -def test_corrupt_charm(fake_project_dir, config): +def test_corrupt_charm(new_path, config): """There was a problem opening the indicated charm.""" - charm_file = fake_project_dir / "foobar.charm" + charm_file = new_path / "foobar.charm" charm_file.write_text("this is not a real zip content") args = Namespace(filepath=charm_file, force=None, format=None, ignore=None) with pytest.raises(CraftError) as cm: Analyse(config).run(args) - assert str(cm.value) == (f"Cannot open charm file '{charm_file}': File is not a zip file") + assert str(cm.value) == ( + f"Cannot open charm file '{charm_file}': File is not a zip file" + ) def create_a_valid_zip(tmp_path): @@ -73,9 +76,9 @@ def create_a_valid_zip(tmp_path): return zip_file -def test_integration_linters(fake_project_dir, emitter, config, monkeypatch): +def test_integration_linters(new_path, emitter, config, monkeypatch): """Integration test with a real analysis.""" - fake_charm = create_a_valid_zip(fake_project_dir) + fake_charm = create_a_valid_zip(new_path) args = Namespace(filepath=fake_charm, force=None, format=None, ignore=None) Analyse(config).run(args) @@ -85,9 +88,15 @@ def test_integration_linters(fake_project_dir, emitter, config, monkeypatch): ) -@pytest.mark.parametrize("indicated_format", [None, JSON_FORMAT]) +@pytest.mark.parametrize("indicated_format", [None, "json"]) def test_complete_set_of_results( - check, emitter, service_factory, config, monkeypatch, fake_project_dir, indicated_format + check, + emitter, + service_factory, + config, + monkeypatch, + fake_project_dir, + indicated_format, ): """Show a complete basic case of results.""" # fake results from the analyzer @@ -144,7 +153,9 @@ def test_complete_set_of_results( ] fake_charm = create_a_valid_zip(fake_project_dir) - args = Namespace(filepath=fake_charm, force=None, format=indicated_format, ignore=None) + args = Namespace( + filepath=fake_charm, force=None, format=indicated_format, ignore=None + ) monkeypatch.setattr( service_factory.analysis, "lint_directory", lambda *a, **k: linting_results ) @@ -219,7 +230,9 @@ def test_complete_set_of_results( assert expected == json.loads(text) -def test_only_attributes(emitter, service_factory, config, monkeypatch, fake_project_dir): +def test_only_attributes( + emitter, service_factory, config, monkeypatch, fake_project_dir +): """Show only attribute results (the rest may be ignored).""" # fake results from the analyzer linting_results = [ @@ -239,7 +252,9 @@ def test_only_attributes(emitter, service_factory, config, monkeypatch, fake_pro ) retcode = Analyse(config).run(args) - emitter.assert_progress("check-attribute: [CHECK-RESULT] text (url)", permanent=True) + emitter.assert_progress( + "check-attribute: [CHECK-RESULT] text (url)", permanent=True + ) assert retcode == 0 @@ -291,7 +306,9 @@ def test_only_errors(emitter, service_factory, config, monkeypatch, fake_project assert retcode == 2 -def 
test_both_errors_and_warnings(emitter, service_factory, config, monkeypatch, fake_project_dir): +def test_both_errors_and_warnings( + emitter, service_factory, config, monkeypatch, fake_project_dir +): """Show error and warnings results.""" # fake results from the analyzer linting_results = [ @@ -369,3 +386,39 @@ def test_only_fatal(emitter, service_factory, config, monkeypatch, fake_project_ emitter.assert_progress("check-lint: [FATAL] text (url)", permanent=True) assert retcode == 1 + + +def zip_directory(directory_path: Path, zip_path: Path): + """Directory to zip with contents and permissions.""" + with zipfile.ZipFile(str(zip_path), "w", zipfile.ZIP_DEFLATED) as zipf: + for file_path in directory_path.rglob("*"): + rel_path = file_path.relative_to(directory_path) + zip_info = zipfile.ZipInfo(str(rel_path)) + zip_info.external_attr = (file_path.stat().st_mode & 0o777) << 16 + + if file_path.is_dir(): + zip_info.filename += "/" + zipf.writestr(zip_info, "") + else: + zipf.writestr(zip_info, file_path.read_bytes()) + + +@pytest.fixture +def linter_charms(request): + return request.config.rootpath / "tests/integration/ops-main-linter-charms" + + +@pytest.mark.parametrize(("charm", "rv"), [("smoke", 0), ("negative", 2)]) +def test_ops_main_linter( + tmp_path: Path, linter_charms: Path, emitter, config, charm: str, rv: int +): + zip_directory(linter_charms / charm, (charm_path := tmp_path / "this.charm")) + + retcode = Analyse(config=config).run( + Namespace(filepath=charm_path, force=None, format=None, ignore=None) + ) + + assert retcode == rv + + if rv: + assert "ops.main() call missing" in str(emitter.interactions) diff --git a/tests/integration/commands/test_extensions.py b/tests/integration/commands/test_extensions.py index 3569f991a..c0aa1e321 100644 --- a/tests/integration/commands/test_extensions.py +++ b/tests/integration/commands/test_extensions.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for extension commands.""" + import argparse import textwrap @@ -41,7 +42,8 @@ def is_experimental(base: tuple[str, str] | None) -> bool: @pytest.fixture(autouse=True, scope="module") def registered_extensions(): default_extensions = { - name: extensions.get_extension_class(name) for name in extensions.get_extension_names() + name: extensions.get_extension_class(name) + for name in extensions.get_extension_names() } for ext in default_extensions: extensions.unregister(ext) diff --git a/tests/integration/commands/test_init.py b/tests/integration/commands/test_init.py index a4f632809..9108a7d1e 100644 --- a/tests/integration/commands/test_init.py +++ b/tests/integration/commands/test_init.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for init command.""" + import argparse import contextlib import os @@ -24,7 +25,6 @@ import sys from unittest import mock -import pydocstyle import pytest import pytest_check @@ -97,7 +97,9 @@ @pytest.fixture def init_command(): - return commands.InitCommand({"app": charmcraft.application.APP_METADATA, "services": None}) + return commands.InitCommand( + {"app": charmcraft.application.APP_METADATA, "services": None} + ) def create_namespace( @@ -148,7 +150,9 @@ def test_files_created_correct( tox_ini = (new_path / "tox.ini").read_text(encoding="utf-8") pytest_check.equal(actual_files, expected_files) - pytest_check.is_true(re.search(rf"^name: {charm_name}$", charmcraft_yaml, re.MULTILINE)) + pytest_check.is_true( + re.search(rf"^name: {charm_name}$", 
charmcraft_yaml, re.MULTILINE) + ) pytest_check.is_true(re.search(rf"^# Copyright \d+ {author}", tox_ini)) @@ -204,7 +208,9 @@ def test_gecos_valid_author(monkeypatch, new_path, init_command, author): ), ], ) -def test_gecos_user_not_found(monkeypatch, new_path, init_command, mock_getpwuid, error_msg): +def test_gecos_user_not_found( + monkeypatch, new_path, init_command, mock_getpwuid, error_msg +): monkeypatch.setattr(pwd, "getpwuid", mock_getpwuid) with pytest.raises(errors.CraftError, match=error_msg): @@ -284,34 +290,14 @@ def test_tox_success(new_path, init_command, profile): if not (new_path / "tox.ini").exists(): pytest.skip("init template doesn't contain tox.ini file") - result = subprocess.run( - ["tox", "-v"], - cwd=new_path, - env=env, - text=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - check=False, - ) - assert result.returncode == 0, "Tox run failed:\n" + result.stdout - - -@pytest.mark.parametrize("profile", list(commands.init.PROFILES)) -def test_pep257(new_path, init_command, profile): - to_ignore = { - "D105", # Missing docstring in magic method - "D107", # Missing docstring in __init__ - } - to_include = pydocstyle.violations.conventions.pep257 - to_ignore - - init_command.run(create_namespace(profile=profile)) - - python_paths = (str(path) for path in new_path.rglob("*.py")) - python_paths = (path for path in python_paths if "tests" not in path) - errors = list(pydocstyle.check(python_paths, select=to_include)) - - if errors: - report = [f"Please fix files as suggested by pydocstyle ({len(errors):d} issues):"] - report.extend(str(e) for e in errors) - msg = "\n".join(report) - pytest.fail(msg, pytrace=False) + if list((new_path / "tests").glob("*.py")): # If any tests exist + result = subprocess.run( + ["tox", "-v", "run", "-e", "unit"], + cwd=new_path, + env=env, + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=False, + ) + assert result.returncode == 0, "Tox run failed:\n" + result.stdout diff --git a/tests/integration/commands/test_pack.py b/tests/integration/commands/test_pack.py index 635bd9a76..fd9d64c51 100644 --- a/tests/integration/commands/test_pack.py +++ b/tests/integration/commands/test_pack.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Integration tests for packing.""" + import sys import zipfile @@ -27,7 +28,8 @@ @pytest.mark.xfail( - sys.platform != "linux", reason="https://github.com/canonical/charmcraft/issues/1552" + sys.platform != "linux", + reason="https://github.com/canonical/charmcraft/issues/1552", ) @pytest.mark.parametrize( ("bundle_yaml", "filename"), @@ -66,14 +68,19 @@ def test_build_basic_bundle(monkeypatch, capsys, app, new_path, bundle_yaml, fil { "build-on": [{"name": "ubuntu", "channel": "22.04"}], "run-on": [ - {"name": "ubuntu", "channel": "22.04", "architectures": ["amd64"]} + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["amd64"], + } ], } ], }, "ubuntu-22.04-amd64", marks=pytest.mark.skipif( - CURRENT_PLATFORM.release != "22.04", reason="Bases charm only tested on jammy." 
+ CURRENT_PLATFORM.release != "22.04", + reason="Bases charm only tested on jammy.", ), id="bases-charm", ), @@ -85,13 +92,17 @@ def test_build_basic_bundle(monkeypatch, capsys, app, new_path, bundle_yaml, fil "description": "A charm for testing", "base": "ubuntu@22.04", "platforms": { - "ubuntu-22.04-amd64": {"build-on": ["amd64"], "build-for": ["amd64"]} + "ubuntu-22.04-amd64": { + "build-on": ["amd64"], + "build-for": ["amd64"], + } }, "parts": {}, }, "ubuntu-22.04-amd64", marks=pytest.mark.skipif( - CURRENT_PLATFORM.release != "22.04", reason="Jammy charms only tested on jammy" + CURRENT_PLATFORM.release != "22.04", + reason="Jammy charms only tested on jammy", ), id="platforms-jammy-charm", ), @@ -107,7 +118,8 @@ def test_build_basic_bundle(monkeypatch, capsys, app, new_path, bundle_yaml, fil }, util.get_host_architecture(), marks=pytest.mark.skipif( - CURRENT_PLATFORM.release != "22.04", reason="Jammy charms only tested on jammy" + CURRENT_PLATFORM.release != "22.04", + reason="Jammy charms only tested on jammy", ), id="platforms-jammy-basic", ), @@ -142,7 +154,8 @@ def test_build_basic_charm( monkeypatch.setenv("CRAFT_DEBUG", "1") monkeypatch.setattr( - "sys.argv", ["charmcraft", "pack", "--destructive-mode", f"--platform={platform}"] + "sys.argv", + ["charmcraft", "pack", "--destructive-mode", f"--platform={platform}"], ) app.configure({}) diff --git a/tests/integration/commands/test_resource_revisions.py b/tests/integration/commands/test_resource_revisions.py index 3e7f8c636..812a34aca 100644 --- a/tests/integration/commands/test_resource_revisions.py +++ b/tests/integration/commands/test_resource_revisions.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for resource-revisions command.""" + import datetime from argparse import Namespace from unittest import mock @@ -27,7 +28,6 @@ from charmcraft import store from charmcraft.application.commands import ListResourceRevisionsCommand -from charmcraft.cmdbase import JSON_FORMAT from charmcraft.env import CharmhubConfig @@ -47,14 +47,16 @@ def validate_params(config, ephemeral=False, needs_auth=True): yield store_mock -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_resourcerevisions_simple(emitter, store_mock, config, formatted): """Happy path of one result from the Store.""" store_response = [ CharmResourceRevision( revision=1, size=pydantic.ByteSize(50), - created_at=datetime.datetime(2020, 7, 3, 2, 30, 40, tzinfo=datetime.timezone.utc), + created_at=datetime.datetime( + 2020, 7, 3, 2, 30, 40, tzinfo=datetime.timezone.utc + ), bases=[ResponseCharmResourceBase()], name="testresource", sha256="", @@ -66,7 +68,9 @@ def test_resourcerevisions_simple(emitter, store_mock, config, formatted): ] store_mock.list_resource_revisions.return_value = store_response - args = Namespace(charm_name="testcharm", resource_name="testresource", format=formatted) + args = Namespace( + charm_name="testcharm", resource_name="testresource", format=formatted + ) ListResourceRevisionsCommand(config).run(args) assert store_mock.mock_calls == [ @@ -90,13 +94,15 @@ def test_resourcerevisions_simple(emitter, store_mock, config, formatted): emitter.assert_messages(expected) -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_resourcerevisions_empty(emitter, store_mock, config, formatted): """No results from the store.""" store_response = [] 
store_mock.list_resource_revisions.return_value = store_response - args = Namespace(charm_name="testcharm", resource_name="testresource", format=formatted) + args = Namespace( + charm_name="testcharm", resource_name="testresource", format=formatted + ) ListResourceRevisionsCommand(config).run(args) if formatted: @@ -105,7 +111,7 @@ def test_resourcerevisions_empty(emitter, store_mock, config, formatted): emitter.assert_message("No revisions found.") -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_resourcerevisions_ordered_by_revision(emitter, store_mock, config, formatted): """Results are presented ordered by revision in the table.""" # three Revisions with all values weirdly similar, the only difference is revision, so @@ -163,7 +169,9 @@ def test_resourcerevisions_ordered_by_revision(emitter, store_mock, config, form ] store_mock.list_resource_revisions.return_value = store_response - args = Namespace(charm_name="testcharm", resource_name="testresource", format=formatted) + args = Namespace( + charm_name="testcharm", resource_name="testresource", format=formatted + ) ListResourceRevisionsCommand(config).run(args) if formatted: @@ -184,9 +192,20 @@ def test_resourcerevisions_ordered_by_revision(emitter, store_mock, config, form "revision": 4, "created at": "2020-07-03T20:30:40+00:00", "size": 876543, - "bases": [{"name": "all", "channel": "all", "architectures": ["amd64", "arm64"]}], + "bases": [ + { + "name": "all", + "channel": "all", + "architectures": ["amd64", "arm64"], + } + ], + }, + { + "revision": 2, + "created at": "2020-07-03T20:30:40+00:00", + "size": 50, + "bases": [], }, - {"revision": 2, "created at": "2020-07-03T20:30:40+00:00", "size": 50, "bases": []}, ] emitter.assert_json_output(expected) else: diff --git a/tests/integration/commands/test_set_resource_architectures.py b/tests/integration/commands/test_set_resource_architectures.py index ae40e0502..0e6ba6974 100644 --- a/tests/integration/commands/test_set_resource_architectures.py +++ b/tests/integration/commands/test_set_resource_architectures.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Integration tests for set-resource-architectures command.""" + import argparse import textwrap @@ -50,7 +51,11 @@ def cmd(service_factory): [ get_fake_revision( revision=1, - bases=[models.ResponseCharmResourceBase(architectures=["amd64", "arm64"])], + bases=[ + models.ResponseCharmResourceBase( + architectures=["amd64", "arm64"] + ) + ], ), get_fake_revision( revision=2, @@ -69,7 +74,11 @@ def cmd(service_factory): [ get_fake_revision( revision=1, - bases=[models.ResponseCharmResourceBase(architectures=["amd64", "arm64"])], + bases=[ + models.ResponseCharmResourceBase( + architectures=["amd64", "arm64"] + ) + ], ), get_fake_revision( revision=2, diff --git a/tests/integration/commands/test_store_commands.py b/tests/integration/commands/test_store_commands.py index 873c40cb6..3b1f12e59 100644 --- a/tests/integration/commands/test_store_commands.py +++ b/tests/integration/commands/test_store_commands.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Integration tests for store commands.""" + import argparse import sys from unittest import mock @@ -22,7 +23,6 @@ from charmcraft import env from charmcraft.application.commands import FetchLibCommand -from charmcraft.cmdbase import JSON_FORMAT from charmcraft.store.models import Library from tests import factory @@ -44,8 +44,10 
@@ def validate_params(config, ephemeral=False, needs_auth=True): # region fetch-lib tests -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) -def test_fetchlib_simple_downloaded(emitter, store_mock, tmp_path, monkeypatch, config, formatted): +@pytest.mark.parametrize("formatted", [None, "json"]) +def test_fetchlib_simple_downloaded( + emitter, store_mock, tmp_path, monkeypatch, config, formatted +): """Happy path fetching the lib for the first time (downloading it).""" monkeypatch.chdir(tmp_path) @@ -102,7 +104,9 @@ def test_fetchlib_simple_downloaded(emitter, store_mock, tmp_path, monkeypatch, assert saved_file.read_text() == lib_content -def test_fetchlib_simple_dash_in_name(emitter, store_mock, tmp_path, monkeypatch, config): +def test_fetchlib_simple_dash_in_name( + emitter, store_mock, tmp_path, monkeypatch, config +): """Happy path fetching the lib for the first time (downloading it).""" monkeypatch.chdir(tmp_path) @@ -144,7 +148,9 @@ def test_fetchlib_simple_dash_in_name(emitter, store_mock, tmp_path, monkeypatch assert saved_file.read_text() == lib_content -def test_fetchlib_simple_dash_in_name_on_disk(emitter, store_mock, tmp_path, monkeypatch, config): +def test_fetchlib_simple_dash_in_name_on_disk( + emitter, store_mock, tmp_path, monkeypatch, config +): """Happy path fetching the lib for the first time (downloading it).""" monkeypatch.chdir(tmp_path) @@ -228,7 +234,7 @@ def test_fetchlib_simple_updated(emitter, store_mock, tmp_path, monkeypatch, con @pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_fetchlib_all(emitter, store_mock, tmp_path, monkeypatch, config, formatted): """Update all the libraries found in disk.""" monkeypatch.chdir(tmp_path) @@ -338,16 +344,18 @@ def test_fetchlib_all(emitter, store_mock, tmp_path, monkeypatch, config, format assert saved_file.read_text() == "new lib content 2" -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_fetchlib_store_not_found(emitter, store_mock, config, formatted): """The indicated library is not found in the store.""" store_mock.get_libraries_tips.return_value = {} args = argparse.Namespace(library="charms.testcharm.v0.testlib", format=formatted) FetchLibCommand(config).run(args) - store_mock.get_libraries_tips.assert_called_once_with( - [{"charm_name": "testcharm", "lib_name": "testlib", "api": 0}] - ), + ( + store_mock.get_libraries_tips.assert_called_once_with( + [{"charm_name": "testcharm", "lib_name": "testlib", "api": 0}] + ), + ) error_message = "Library charms.testcharm.v0.testlib not found in Charmhub." 
if formatted: expected = [ @@ -364,8 +372,10 @@ def test_fetchlib_store_not_found(emitter, store_mock, config, formatted): emitter.assert_message(error_message) -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) -def test_fetchlib_store_is_old(emitter, store_mock, tmp_path, monkeypatch, config, formatted): +@pytest.mark.parametrize("formatted", [None, "json"]) +def test_fetchlib_store_is_old( + emitter, store_mock, tmp_path, monkeypatch, config, formatted +): """The store has an older version that what is found locally.""" monkeypatch.chdir(tmp_path) @@ -386,8 +396,12 @@ def test_fetchlib_store_is_old(emitter, store_mock, tmp_path, monkeypatch, confi args = argparse.Namespace(library="charms.testcharm.v0.testlib", format=formatted) FetchLibCommand(config).run(args) - store_mock.get_libraries_tips.assert_called_once_with([{"lib_id": lib_id, "api": 0}]) - error_message = "Library charms.testcharm.v0.testlib has local changes, cannot be updated." + store_mock.get_libraries_tips.assert_called_once_with( + [{"lib_id": lib_id, "api": 0}] + ) + error_message = ( + "Library charms.testcharm.v0.testlib has local changes, cannot be updated." + ) if formatted: expected = [ { @@ -403,7 +417,7 @@ def test_fetchlib_store_is_old(emitter, store_mock, tmp_path, monkeypatch, confi emitter.assert_message(error_message) -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_fetchlib_store_same_versions_same_hash( emitter, store_mock, tmp_path, monkeypatch, config, formatted ): @@ -411,7 +425,9 @@ def test_fetchlib_store_same_versions_same_hash( monkeypatch.chdir(tmp_path) lib_id = "test-example-lib-id" - _, c_hash = factory.create_lib_filepath("testcharm", "testlib", api=0, patch=7, lib_id=lib_id) + _, c_hash = factory.create_lib_filepath( + "testcharm", "testlib", api=0, patch=7, lib_id=lib_id + ) store_mock.get_libraries_tips.return_value = { (lib_id, 0): Library( @@ -427,8 +443,12 @@ def test_fetchlib_store_same_versions_same_hash( args = argparse.Namespace(library="charms.testcharm.v0.testlib", format=formatted) FetchLibCommand(config).run(args) - store_mock.get_libraries_tips.assert_called_once_with([{"lib_id": lib_id, "api": 0}]) - error_message = "Library charms.testcharm.v0.testlib was already up to date in version 0.7." + store_mock.get_libraries_tips.assert_called_once_with( + [{"lib_id": lib_id, "api": 0}] + ) + error_message = ( + "Library charms.testcharm.v0.testlib was already up to date in version 0.7." + ) if formatted: expected = [ { @@ -444,7 +464,7 @@ def test_fetchlib_store_same_versions_same_hash( emitter.assert_message(error_message) -@pytest.mark.parametrize("formatted", [None, JSON_FORMAT]) +@pytest.mark.parametrize("formatted", [None, "json"]) def test_fetchlib_store_same_versions_different_hash( emitter, store_mock, tmp_path, monkeypatch, config, formatted ): @@ -471,7 +491,9 @@ def test_fetchlib_store_same_versions_different_hash( assert store_mock.mock_calls == [ mock.call.get_libraries_tips([{"lib_id": lib_id, "api": 0}]), ] - error_message = "Library charms.testcharm.v0.testlib has local changes, cannot be updated." + error_message = ( + "Library charms.testcharm.v0.testlib has local changes, cannot be updated." 
+ ) if formatted: expected = [ { diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index b65258a67..f31f60df8 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -14,19 +14,60 @@ # # For further info, check https://github.com/canonical/charmcraft """General fixtures for integration tests.""" + +import pathlib +from typing import Any from unittest import mock +import craft_platforms import craft_store +import distro import pytest +from craft_application import util from charmcraft import application, services from charmcraft.application import commands +from charmcraft.models import project + + +@pytest.fixture +def project_path(tmp_path: pathlib.Path): + path = tmp_path / "project" + path.mkdir() + return path + + +@pytest.fixture +def charm_project( + basic_charm_dict: dict[str, Any], project_path: pathlib.Path, request +): + # Workaround for testing across systems. If we're not on Ubuntu, make an Ubuntu 24.04 charm. + # If we are on Ubuntu, use the current version. + distro_id = "ubuntu" + distro_version = distro.version() if craft_platforms.is_ubuntu_like() else "24.04" + + return project.PlatformCharm.unmarshal( + basic_charm_dict + | { + "base": f"{distro_id}@{distro_version}", + "platforms": {util.get_host_architecture(): None}, + }, + ) @pytest.fixture -def service_factory(): +def service_factory( + new_path: pathlib.Path, charm_project, default_build_plan, project_path +): factory = services.CharmcraftServiceFactory(app=application.APP_METADATA) factory.store.client = mock.Mock(spec_set=craft_store.StoreClient) + factory.project = charm_project + factory.set_kwargs( + "lifecycle", + work_dir=new_path, + build_plan=default_build_plan, + cache_dir="~/.cache", + ) return factory diff --git a/tests/integration/ops-main-linter-charms/negative/charmcraft.yaml b/tests/integration/ops-main-linter-charms/negative/charmcraft.yaml new file mode 100644 index 000000000..9095782db --- /dev/null +++ b/tests/integration/ops-main-linter-charms/negative/charmcraft.yaml @@ -0,0 +1,16 @@ +--- +name: smoke +type: charm +title: n/a +summary: n/a +description: n/a + +base: ubuntu@24.04 +build-base: ubuntu@24.04 +platforms: + arm64: + +parts: + charm: + plugin: charm + source: . diff --git a/tests/integration/ops-main-linter-charms/negative/dispatch b/tests/integration/ops-main-linter-charms/negative/dispatch new file mode 100755 index 000000000..1aa294960 --- /dev/null +++ b/tests/integration/ops-main-linter-charms/negative/dispatch @@ -0,0 +1,4 @@ +#!/bin/sh + +JUJU_DISPATCH_PATH="${JUJU_DISPATCH_PATH:-$0}" PYTHONPATH=lib:venv \ + exec ./src/charm.py diff --git a/tests/integration/ops-main-linter-charms/negative/metadata.yaml b/tests/integration/ops-main-linter-charms/negative/metadata.yaml new file mode 100644 index 000000000..fe71596bd --- /dev/null +++ b/tests/integration/ops-main-linter-charms/negative/metadata.yaml @@ -0,0 +1,11 @@ +--- +name: smoke +type: charm +title: n/a +summary: n/a +description: n/a + +base: ubuntu@24.04 +build-base: ubuntu@24.04 +platforms: + arm64: diff --git a/tests/integration/ops-main-linter-charms/negative/src/charm.py b/tests/integration/ops-main-linter-charms/negative/src/charm.py new file mode 100755 index 000000000..14d38b313 --- /dev/null +++ b/tests/integration/ops-main-linter-charms/negative/src/charm.py @@ -0,0 +1,12 @@ +# Copyright 2024 Canonical Ltd. +import ops # type: ignore + + +class SomeCharm(ops.CharmBase): ... 
+ + +# ruff: noqa: ERA001 +# charmcraft analyse should detect that ops.main() call is missing +# +# if __name__ == "__main__": +# ops.main(SomeCharm) diff --git a/tests/integration/ops-main-linter-charms/negative/venv/ops/.gitkeep b/tests/integration/ops-main-linter-charms/negative/venv/ops/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/ops-main-linter-charms/smoke/charmcraft.yaml b/tests/integration/ops-main-linter-charms/smoke/charmcraft.yaml new file mode 100644 index 000000000..9095782db --- /dev/null +++ b/tests/integration/ops-main-linter-charms/smoke/charmcraft.yaml @@ -0,0 +1,16 @@ +--- +name: smoke +type: charm +title: n/a +summary: n/a +description: n/a + +base: ubuntu@24.04 +build-base: ubuntu@24.04 +platforms: + arm64: + +parts: + charm: + plugin: charm + source: . diff --git a/tests/integration/ops-main-linter-charms/smoke/dispatch b/tests/integration/ops-main-linter-charms/smoke/dispatch new file mode 100755 index 000000000..1aa294960 --- /dev/null +++ b/tests/integration/ops-main-linter-charms/smoke/dispatch @@ -0,0 +1,4 @@ +#!/bin/sh + +JUJU_DISPATCH_PATH="${JUJU_DISPATCH_PATH:-$0}" PYTHONPATH=lib:venv \ + exec ./src/charm.py diff --git a/tests/integration/ops-main-linter-charms/smoke/metadata.yaml b/tests/integration/ops-main-linter-charms/smoke/metadata.yaml new file mode 100644 index 000000000..fe71596bd --- /dev/null +++ b/tests/integration/ops-main-linter-charms/smoke/metadata.yaml @@ -0,0 +1,11 @@ +--- +name: smoke +type: charm +title: n/a +summary: n/a +description: n/a + +base: ubuntu@24.04 +build-base: ubuntu@24.04 +platforms: + arm64: diff --git a/tests/integration/ops-main-linter-charms/smoke/src/charm.py b/tests/integration/ops-main-linter-charms/smoke/src/charm.py new file mode 100755 index 000000000..25429819f --- /dev/null +++ b/tests/integration/ops-main-linter-charms/smoke/src/charm.py @@ -0,0 +1,9 @@ +# Copyright 2024 Canonical Ltd. +import ops # type: ignore + + +class SomeCharm(ops.CharmBase): ... + + +if __name__ == "__main__": + ops.main(SomeCharm) diff --git a/tests/integration/ops-main-linter-charms/smoke/venv/ops/.gitkeep b/tests/integration/ops-main-linter-charms/smoke/venv/ops/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/parts/__init__.py b/tests/integration/parts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/parts/conftest.py b/tests/integration/parts/conftest.py new file mode 100644 index 000000000..b6b5b1757 --- /dev/null +++ b/tests/integration/parts/conftest.py @@ -0,0 +1,40 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+# For further info, check https://github.com/canonical/charmcraft
+
+import sys
+
+import craft_platforms
+import distro
+import pytest
+from craft_application import models
+from craft_providers import bases
+
+pytestmark = [
+    pytest.mark.skipif(sys.platform != "linux", reason="craft-parts is linux-only")
+]
+
+
+@pytest.fixture
+def build_plan() -> list[models.BuildInfo]:
+    arch = craft_platforms.DebianArchitecture.from_host().value
+    return [
+        models.BuildInfo(
+            base=bases.BaseName(distro.id(), distro.version()),
+            build_on=arch,
+            build_for="arm64",
+            platform="distro-1-test64",
+        )
+    ]
diff --git a/tests/integration/parts/plugins/__init__.py b/tests/integration/parts/plugins/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/integration/parts/plugins/test_poetry.py b/tests/integration/parts/plugins/test_poetry.py
new file mode 100644
index 000000000..a04939826
--- /dev/null
+++ b/tests/integration/parts/plugins/test_poetry.py
@@ -0,0 +1,91 @@
+# Copyright 2024 Canonical Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# For further info, check https://github.com/canonical/charmcraft
+"""Integration tests for the Charmcraft-specific poetry plugin."""
+
+import pathlib
+import subprocess
+import sys
+from typing import Any
+
+import distro
+import pytest
+from craft_application import util
+
+from charmcraft import services
+from charmcraft.models import project
+
+pytestmark = [
+    pytest.mark.skipif(sys.platform != "linux", reason="craft-parts is linux-only")
+]
+
+
+@pytest.fixture
+def charm_project(
+    basic_charm_dict: dict[str, Any], project_path: pathlib.Path, request
+):
+    return project.PlatformCharm.unmarshal(
+        basic_charm_dict
+        | {
+            "base": f"{distro.id()}@{distro.version()}",
+            "platforms": {util.get_host_architecture(): None},
+            "parts": {
+                "my-charm": {
+                    "plugin": "poetry",
+                    "source": str(project_path),
+                    "source-type": "local",
+                }
+            },
+        },
+    )
+
+
+@pytest.fixture
+def poetry_project(project_path: pathlib.Path) -> None:
+    subprocess.run(
+        [
+            "poetry",
+            "init",
+            "--name=test-charm",
+            f"--directory={project_path}",
+            "--no-interaction",
+        ],
+        check=False,
+    )
+    source_dir = project_path / "src"
+    source_dir.mkdir()
+    (source_dir / "charm.py").write_text("# Charm file")
+
+
+@pytest.mark.usefixtures("poetry_project")
+def test_poetry_plugin(
+    build_plan,
+    service_factory: services.CharmcraftServiceFactory,
+    tmp_path: pathlib.Path,
+):
+    install_path = tmp_path / "parts" / "my-charm" / "install"
+    stage_path = tmp_path / "stage"
+    service_factory.lifecycle._build_plan = build_plan
+
+    service_factory.lifecycle.run("stage")
+
+    # Check that the part install directory looks correct.
+    assert (install_path / "src" / "charm.py").read_text() == "# Charm file"
+    assert (install_path / "venv" / "lib").is_dir()
+
+    # Check that the stage directory looks correct.
+ assert (stage_path / "src" / "charm.py").read_text() == "# Charm file" + assert (stage_path / "venv" / "lib").is_dir() + assert not (stage_path / "venv" / "lib64").is_symlink() diff --git a/tests/integration/parts/plugins/test_python.py b/tests/integration/parts/plugins/test_python.py new file mode 100644 index 000000000..4d393333e --- /dev/null +++ b/tests/integration/parts/plugins/test_python.py @@ -0,0 +1,90 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft +"""Integration tests for the Charmcraft-specific python plugin.""" + +import pathlib +import sys +from typing import Any + +import distro +import pytest +from craft_application import util + +from charmcraft import services +from charmcraft.models import project + +pytestmark = [ + pytest.mark.skipif(sys.platform != "linux", reason="craft-parts is linux-only") +] + + +@pytest.fixture +def charm_project( + basic_charm_dict: dict[str, Any], project_path: pathlib.Path, request +): + return project.PlatformCharm.unmarshal( + basic_charm_dict + | { + "base": f"{distro.id()}@{distro.version()}", + "platforms": {util.get_host_architecture(): None}, + "parts": { + "my-charm": { + "plugin": "python", + "python-requirements": ["requirements.txt"], + "source": str(project_path), + "source-type": "local", + } + }, + }, + ) + + +@pytest.fixture +def python_project(project_path: pathlib.Path) -> None: + source_path = project_path / "src" + source_path.mkdir() + (source_path / "charm.py").write_text("# Charm file") + (project_path / "requirements.txt").write_text("distro==1.4.0") + + +@pytest.mark.usefixtures("python_project") +def test_python_plugin( + build_plan, + service_factory: services.CharmcraftServiceFactory, + tmp_path: pathlib.Path, +): + install_path = tmp_path / "parts" / "my-charm" / "install" + stage_path = tmp_path / "stage" + service_factory.lifecycle._build_plan = build_plan + + service_factory.lifecycle.run("stage") + + # Check that the part install directory looks correct. + assert (install_path / "src" / "charm.py").read_text() == "# Charm file" + assert (install_path / "venv" / "lib").is_dir() + assert ( + len( + list( + (install_path / "venv" / "lib").glob("python*/site-packages/distro.py") + ) + ) + == 1 + ) + + # Check that the stage directory looks correct. 
+ assert (stage_path / "src" / "charm.py").read_text() == "# Charm file" + assert (stage_path / "venv" / "lib").is_dir() + assert not (stage_path / "venv" / "lib64").is_symlink() diff --git a/tests/integration/services/conftest.py b/tests/integration/services/conftest.py index 04704da92..96f232f06 100644 --- a/tests/integration/services/conftest.py +++ b/tests/integration/services/conftest.py @@ -14,10 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Configuration for services integration tests.""" -import contextlib -import sys -import pyfakefs.fake_filesystem import pytest from charmcraft import services @@ -25,20 +22,10 @@ @pytest.fixture -def service_factory( - fs: pyfakefs.fake_filesystem.FakeFilesystem, fake_path, simple_charm -) -> services.CharmcraftServiceFactory: - fake_project_dir = fake_path / "project" +def service_factory(simple_charm, new_path) -> services.CharmcraftServiceFactory: + fake_project_dir = new_path / "project" fake_project_dir.mkdir() - # Allow access to the real venv library path. - # This is necessary because certifi lazy-loads the certificate file. - for python_path in sys.path: - if not python_path: - continue - with contextlib.suppress(OSError): - fs.add_real_directory(python_path) - factory = services.CharmcraftServiceFactory(app=APP_METADATA) app = Charmcraft(app=APP_METADATA, services=factory) diff --git a/tests/integration/services/test_image.py b/tests/integration/services/test_image.py new file mode 100644 index 000000000..dc19d0d27 --- /dev/null +++ b/tests/integration/services/test_image.py @@ -0,0 +1,49 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Integration tests for the Image service.""" + +import sys + +import pytest + +from charmcraft import application, services + + +@pytest.fixture +def image_service() -> services.ImageService: + service = services.ImageService( + app=application.APP_METADATA, + services=None, # pyright: ignore[reportArgumentType] + ) + service.setup() + return service + + +@pytest.mark.parametrize( + "url", + [ + "docker://hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346", + "hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346", + "docker://ghcr.io/canonical/charmed-mysql@sha256:89b8305613f6ce94f78a7c9b4baedef78f2816fd6bc74c00f6607bc5e57bd8e6", + "docker://quay.io/prometheus/blackbox-exporter:v0.24.0", + "docker://quay.io/prometheus/blackbox-exporter:v0.24.0@sha256:3af31f8bd1ad2907b4b0f7c485fde3de0a8ee0b498d42fc971f0698885c03acb", + ], +) +@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") +def test_get_maybe_id_from_docker_no_exceptions( + image_service: services.ImageService, url +): + image_service.get_maybe_id_from_docker(url) diff --git a/tests/integration/services/test_lifecycle.py b/tests/integration/services/test_lifecycle.py index ff473a44b..f0c199858 100644 --- a/tests/integration/services/test_lifecycle.py +++ b/tests/integration/services/test_lifecycle.py @@ -15,7 +15,6 @@ # For further info, check https://github.com/canonical/charmcraft """Integration tests for the lifecycle service.""" - import distro import pytest from craft_application import errors, models, util diff --git a/tests/integration/services/test_package.py b/tests/integration/services/test_package.py index cfe9fc0b8..4dee96074 100644 --- a/tests/integration/services/test_package.py +++ b/tests/integration/services/test_package.py @@ -14,7 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for package service.""" -import contextlib + import datetime import pathlib @@ -28,8 +28,8 @@ @pytest.fixture -def package_service(fake_path, service_factory, default_build_plan): - fake_project_dir = fake_path +def package_service(new_path: pathlib.Path, service_factory, default_build_plan): + fake_project_dir = new_path svc = services.PackageService( app=APP_METADATA, project=service_factory.project, @@ -48,16 +48,18 @@ def package_service(fake_path, service_factory, default_build_plan): for path in (pathlib.Path(__file__).parent / "sample_projects").iterdir() ], ) -@freezegun.freeze_time(datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc)) -def test_write_metadata(monkeypatch, fs, package_service, project_path): +@freezegun.freeze_time( + datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc) +) +def test_write_metadata(monkeypatch, new_path, package_service, project_path): monkeypatch.setattr(charmcraft, "__version__", "3.0-test-version") - with contextlib.suppress(FileExistsError): - fs.add_real_directory(project_path) - test_prime_dir = pathlib.Path("/prime") - fs.create_dir(test_prime_dir) + test_prime_dir = new_path / "prime" + test_prime_dir.mkdir() expected_prime_dir = project_path / "prime" - project = models.CharmcraftProject.from_yaml_file(project_path / "project" / "charmcraft.yaml") + project = models.CharmcraftProject.from_yaml_file( + project_path / "project" / "charmcraft.yaml" + ) project._started_at = datetime.datetime.now(tz=datetime.timezone.utc) package_service._project = project @@ -74,24 
+76,26 @@ def test_write_metadata(monkeypatch, fs, package_service, project_path): for path in (pathlib.Path(__file__).parent / "sample_projects").iterdir() ], ) -@freezegun.freeze_time(datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc)) -def test_overwrite_metadata(monkeypatch, fs, package_service, project_path): +@freezegun.freeze_time( + datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc) +) +def test_overwrite_metadata(monkeypatch, new_path, package_service, project_path): """Test that the metadata file gets rewritten for a charm. Regression test for https://github.com/canonical/charmcraft/issues/1654 """ monkeypatch.setattr(charmcraft, "__version__", "3.0-test-version") - with contextlib.suppress(FileExistsError): - fs.add_real_directory(project_path) - test_prime_dir = pathlib.Path("/prime") - fs.create_dir(test_prime_dir) + test_prime_dir = new_path / "prime" + test_prime_dir.mkdir() expected_prime_dir = project_path / "prime" - project = models.CharmcraftProject.from_yaml_file(project_path / "project" / "charmcraft.yaml") + project = models.CharmcraftProject.from_yaml_file( + project_path / "project" / "charmcraft.yaml" + ) project._started_at = datetime.datetime.now(tz=datetime.timezone.utc) package_service._project = project - fs.create_file(test_prime_dir / const.METADATA_FILENAME, contents="INVALID!!") + (test_prime_dir / const.METADATA_FILENAME).write_text("INVALID!!") package_service.write_metadata(test_prime_dir) @@ -99,23 +103,25 @@ def test_overwrite_metadata(monkeypatch, fs, package_service, project_path): pytest_check.equal((test_prime_dir / file.name).read_text(), file.read_text()) -@freezegun.freeze_time(datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc)) -def test_no_overwrite_reactive_metadata(monkeypatch, fs, package_service): +@freezegun.freeze_time( + datetime.datetime(2020, 3, 14, 0, 0, 0, tzinfo=datetime.timezone.utc) +) +def test_no_overwrite_reactive_metadata(monkeypatch, new_path, package_service): """Test that the metadata file doesn't get overwritten for a reactive charm.. Regression test for https://github.com/canonical/charmcraft/issues/1654 """ monkeypatch.setattr(charmcraft, "__version__", "3.0-test-version") project_path = pathlib.Path(__file__).parent / "sample_projects" / "basic-reactive" - with contextlib.suppress(FileExistsError): - fs.add_real_directory(project_path) - test_prime_dir = pathlib.Path("/prime") - fs.create_dir(test_prime_dir) - test_stage_dir = pathlib.Path("/stage") - fs.create_dir(test_stage_dir) - fs.create_file(test_stage_dir / const.METADATA_FILENAME, contents="INVALID!!") - - project = models.CharmcraftProject.from_yaml_file(project_path / "project" / "charmcraft.yaml") + test_prime_dir = new_path / "prime" + test_prime_dir.mkdir() + test_stage_dir = new_path / "stage" + test_stage_dir.mkdir() + (test_stage_dir / const.METADATA_FILENAME).write_text("INVALID!!") + + project = models.CharmcraftProject.from_yaml_file( + project_path / "project" / "charmcraft.yaml" + ) project._started_at = datetime.datetime.now(tz=datetime.timezone.utc) package_service._project = project diff --git a/tests/integration/services/test_provider.py b/tests/integration/services/test_provider.py new file mode 100644 index 000000000..484d06fcb --- /dev/null +++ b/tests/integration/services/test_provider.py @@ -0,0 +1,114 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft +"""Integration tests for the provider service.""" + +import pathlib +import shutil +import subprocess +import sys + +import pytest +from craft_application.models import BuildInfo +from craft_cli.pytest_plugin import RecordingEmitter + +from charmcraft import services +from charmcraft.services.provider import _maybe_lock_cache + + +@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows") +@pytest.mark.skipif( + sys.platform == "darwin", reason="multipass sometimes fails weirdly for this test" +) +def test_lock_cache( + service_factory: services.CharmcraftServiceFactory, + tmp_path: pathlib.Path, + default_build_info: BuildInfo, + emitter: RecordingEmitter, +): + cache_path = tmp_path / "cache" + cache_path.mkdir() + lock_file = cache_path / "charmcraft.lock" + bash_lock_cmd = ( + ["bash", "-c", f"flock -n {lock_file} true"] if shutil.which("flock") else None + ) + provider = service_factory.provider + provider_kwargs = { + "build_info": default_build_info, + "work_dir": tmp_path, + "cache_path": cache_path, + } + assert not lock_file.exists() + + with provider.instance(**provider_kwargs): + # Test that the cache lock gets created + assert lock_file.is_file() + if bash_lock_cmd: + with pytest.raises(subprocess.CalledProcessError): + # Another process should not be able to lock the file. + subprocess.run(bash_lock_cmd, check=True) + + # After exiting we should be able to lock the file. + if bash_lock_cmd: + subprocess.run(bash_lock_cmd, check=True) + + +@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows") +@pytest.mark.skipif( + sys.platform == "darwin", reason="multipass sometimes fails weirdly for this test" +) +def test_locked_cache_no_cache( + service_factory: services.CharmcraftServiceFactory, + tmp_path: pathlib.Path, + default_build_info: BuildInfo, + emitter: RecordingEmitter, +): + cache_path = tmp_path / "cache" + cache_path.mkdir() + lock_file = cache_path / "charmcraft.lock" + + bash_lock_cmd = ( + ["bash", "-c", f"flock -n {lock_file} true"] if shutil.which("flock") else None + ) + # Check that we can lock the file from another process. + if bash_lock_cmd: + subprocess.run(bash_lock_cmd, check=True) + _ = _maybe_lock_cache(cache_path) + # And now we can't. + if bash_lock_cmd: + with pytest.raises(subprocess.CalledProcessError): + subprocess.run(bash_lock_cmd, check=True) + + provider = service_factory.provider + provider_kwargs = { + "build_info": default_build_info, + "work_dir": tmp_path, + "cache_path": cache_path, + } + + with provider.instance(**provider_kwargs) as instance: + # Create a file in the cache and ensure it's not visible in the outer fs + instance.execute_run(["touch", "/root/.cache/cache_cached"]) + + # Because we've already locked the cache, we don't get a subdirectory in + # the cache, and thus the touch command inside there only affected the + # instance cache and not the shared cache. 
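The locking behaviour both provider tests depend on is a non-blocking advisory lock on a file inside the shared cache: _maybe_lock_cache takes it when it can, and a build that finds the lock already held falls back to an instance-only cache, which is also what the flock -n probe above checks from a second process. A minimal sketch of that pattern, with hypothetical names and assuming a POSIX host (this is not Charmcraft's actual implementation):

import fcntl
import pathlib


def maybe_lock(cache_dir: pathlib.Path):
    # Illustrative sketch only; charmcraft.services.provider._maybe_lock_cache
    # may be implemented differently. Assumes a POSIX host (the tests above
    # skip Windows for the same reason).
    lock_file = (cache_dir / "charmcraft.lock").open("w")
    try:
        fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        lock_file.close()
        return None  # another process holds the lock: run without the shared cache
    return lock_file  # keep this object alive for as long as the lock is needed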
+ assert list(cache_path.iterdir()) == [cache_path / "charmcraft.lock"] + emitter.assert_progress( + "Shared cache locked by another process; running without cache.", + permanent=True, + ) + + assert not (tmp_path / "cache_cached").exists() diff --git a/tests/integration/services/test_store.py b/tests/integration/services/test_store.py index f54fe91a1..5ca489063 100644 --- a/tests/integration/services/test_store.py +++ b/tests/integration/services/test_store.py @@ -15,7 +15,6 @@ # For further info, check https://github.com/canonical/charmcraft """Integration tests for the store service.""" - import pytest from charmcraft import models, services diff --git a/tests/integration/test_charm_builder.py b/tests/integration/test_charm_builder.py index 16c84e877..7859cd4ac 100644 --- a/tests/integration/test_charm_builder.py +++ b/tests/integration/test_charm_builder.py @@ -15,7 +15,6 @@ # For further info, check https://github.com/canonical/charmcraft """Integration tests for CharmBuilder.""" - import pathlib import sys diff --git a/tests/integration/test_linters.py b/tests/integration/test_linters.py new file mode 100644 index 000000000..97645f3fc --- /dev/null +++ b/tests/integration/test_linters.py @@ -0,0 +1,61 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Unit tests for linters.""" + +import pathlib +import subprocess +import sys + +import pytest + +from charmcraft import linters +from charmcraft.models.lint import LintResult + +pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows not supported") + + +@pytest.mark.parametrize( + "pip_cmd", + [ + ["--version"], + ["install", "pytest", "hypothesis"], + ], +) +@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") +def test_pip_check_success(tmp_path: pathlib.Path, pip_cmd: list[str]): + venv_path = tmp_path / "venv" + subprocess.run([sys.executable, "-m", "venv", venv_path], check=True) + subprocess.run([venv_path / "bin" / "python", "-m", "pip", *pip_cmd], check=True) + + lint = linters.PipCheck() + assert lint.run(tmp_path) == LintResult.OK + assert lint.text == linters.PipCheck.text + + +@pytest.mark.parametrize( + "pip_cmd", + [ + ["install", "--no-deps", "pydantic==2.9.2"], + ], +) +def test_pip_check_failure(tmp_path: pathlib.Path, pip_cmd: list[str]): + venv_path = tmp_path / "venv" + subprocess.run([sys.executable, "-m", "venv", venv_path], check=True) + subprocess.run([venv_path / "bin" / "python", "-m", "pip", *pip_cmd], check=True) + + lint = linters.PipCheck() + assert lint.run(tmp_path) == LintResult.WARNING + assert "pydantic 2.9.2 requires pydantic-core" in lint.text diff --git a/tests/integration/utils/test_skopeo.py b/tests/integration/utils/test_skopeo.py index 281827a5b..d2cbfb0d9 100644 --- a/tests/integration/utils/test_skopeo.py +++ b/tests/integration/utils/test_skopeo.py @@ -28,7 +28,8 @@ pytestmark = [ pytest.mark.skipif( - "CI" not in os.environ and not shutil.which("skopeo"), reason="skopeo not found in PATH" + "CI" not in os.environ and not shutil.which("skopeo"), + reason="skopeo not found in PATH", ), pytest.mark.xfail( platform.system().lower() not in ("linux", "darwin"), @@ -43,7 +44,11 @@ [ ("alpine", "docker://ghcr.io/containerd/alpine", "3.14.0"), ("debian12", "docker://gcr.io/distroless/base-debian12", "nonroot"), - ("mock-rock", "docker://ghcr.io/canonical/oci-factory/mock-rock", "1.2-22.04_279"), + ( + "mock-rock", + "docker://ghcr.io/canonical/oci-factory/mock-rock", + "1.2-22.04_279", + ), ("nanoserver", "docker://ghcr.io/containerd/nanoserver", "1809"), ], ) diff --git a/tests/spread/commands/init-flask-framework/task.yaml b/tests/spread/commands/init-extensions/task.yaml similarity index 67% rename from tests/spread/commands/init-flask-framework/task.yaml rename to tests/spread/commands/init-extensions/task.yaml index 4fdb20425..ee8c09ee0 100644 --- a/tests/spread/commands/init-flask-framework/task.yaml +++ b/tests/spread/commands/init-extensions/task.yaml @@ -1,9 +1,15 @@ -summary: test charmcraft init with flask-framework profile +summary: test charmcraft init with framework profiles priority: 500 # This builds pydantic, so do it early kill-timeout: 75m # Because it builds pydantic, it takes a long time. systems: # We only need to run this test once, and it takes a long time. 
- ubuntu-22.04-64 +environment: + PROFILE/flask: flask-framework + PROFILE/django: django-framework + PROFILE/go: go-framework + PROFILE/fastapi: fastapi-framework + CHARMCRAFT_ENABLE_EXPERIMENTAL_EXTENSIONS: "true" execute: | # Required for fetch-libs to succeed since the libraries are not available on @@ -14,7 +20,7 @@ execute: | mkdir -p test-init cd test-init - charmcraft init --profile flask-framework + charmcraft init --profile "${PROFILE}" charmcraft fetch-libs charmcraft pack --verbose test -f *.charm diff --git a/tests/spread/dependencies/charmlibs/charmcraft.yaml b/tests/spread/dependencies/charmlibs/charmcraft.yaml new file mode 100644 index 000000000..6bf50c3dd --- /dev/null +++ b/tests/spread/dependencies/charmlibs/charmcraft.yaml @@ -0,0 +1,18 @@ +type: charm +name: test-charm +summary: Test charm +description: Test charm + +base: ubuntu@24.04 + +platforms: + amd64: + arm64: + +charm-libs: + - lib: data-platform-libs.upgrade + version: "0.18" + +parts: + nothing: + plugin: nil diff --git a/tests/spread/dependencies/charmlibs/task.yaml b/tests/spread/dependencies/charmlibs/task.yaml new file mode 100644 index 000000000..c103ab26a --- /dev/null +++ b/tests/spread/dependencies/charmlibs/task.yaml @@ -0,0 +1,40 @@ +summary: test charmlibs dependencies download and update on pack + +environment: + # Use the real store. + CHARMCRAFT_STORE_API_URL: https://api.charmhub.io + # Ensure that the authentication variable is unset, testing anonymous store access. + CHARMCRAFT_AUTH: null + +include: + - tests/spread/dependencies + +kill-timeout: 30m + +prepare: | + mkdir -p charm + pushd charm + charmcraft init --profile=machine + popd + + cp charmcraft.yaml charm/charmcraft.yaml + +execute: | + pushd charm + charmcraft pack + test -f *.charm + test -f lib/charms/data_platform_libs/v0/upgrade.py + cat lib/charms/data_platform_libs/v0/upgrade.py | MATCH 'LIBPATCH = 18' + + sed -i 's/version: "0.18"/version: "0.10"/' charmcraft.yaml + charmcraft pack + # Should remain the same, no update. + cat lib/charms/data_platform_libs/v0/upgrade.py | MATCH 'LIBPATCH = 18' + +restore: | + rm -f ~/*.charm + + pushd charm + charmcraft clean + popd + rm -rf charm diff --git a/tests/spread/smoketests/basic/charmcraft-bases-22.04-all.yaml b/tests/spread/smoketests/basic/charmcraft-bases-22.04-all.yaml new file mode 100644 index 000000000..ae3c17d4c --- /dev/null +++ b/tests/spread/smoketests/basic/charmcraft-bases-22.04-all.yaml @@ -0,0 +1,18 @@ +name: jammy-all +type: charm +title: build for all +summary: A bases charm that's architecture independent. +description: A bases charm that's architecture independent. +bases: + - build-on: + - name: ubuntu + channel: "22.04" + run-on: + - name: ubuntu + channel: "22.04" + architectures: [all] + +parts: + my-charm: + plugin: charm + source: . diff --git a/tests/spread/smoketests/basic/charmcraft-platforms-24.04-all.yaml b/tests/spread/smoketests/basic/charmcraft-platforms-24.04-all.yaml new file mode 100644 index 000000000..36069a58c --- /dev/null +++ b/tests/spread/smoketests/basic/charmcraft-platforms-24.04-all.yaml @@ -0,0 +1,14 @@ +name: build-for-all +type: charm +title: Build for all +summary: A platforms charm that's architecture independent. +description: A platforms charm that's architecture independent. +base: ubuntu@24.04 +platforms: + all: + build-on: [amd64, arm64, riscv64] + build-for: [all] +parts: + my-charm: + plugin: charm + source: . 
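Both of the "all" recipes above should produce an architecture-independent charm; the basic smoke test below verifies that by reading manifest.yaml out of the packed charm with yq. The same check written in Python would look roughly like this, with an assumed artifact name (the spread test itself just globs for *.charm):

import zipfile

import yaml

# Hypothetical file name; charmcraft derives the real one from the project
# name, base and platform.
CHARM_FILE = "build-for-all_ubuntu-24.04-all.charm"

with zipfile.ZipFile(CHARM_FILE) as charm:
    manifest = yaml.safe_load(charm.read("manifest.yaml"))

architectures = {arch for base in manifest["bases"] for arch in base["architectures"]}
assert architectures == {"all"}, f"expected only 'all', got {architectures}"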
diff --git a/tests/spread/smoketests/basic/charmcraft-platforms-24.04-python.yaml b/tests/spread/smoketests/basic/charmcraft-platforms-24.04-python.yaml new file mode 100644 index 000000000..0d585bf07 --- /dev/null +++ b/tests/spread/smoketests/basic/charmcraft-platforms-24.04-python.yaml @@ -0,0 +1,19 @@ +name: noble-platforms +type: charm +title: A charm built and running on noble. +summary: A charm built and running on noble. +description: A charm built and running on noble. +base: ubuntu@24.04 +platforms: + amd64: + arm64: + riscv64: + mainframe: + build-on: [s390x, ppc64el] + build-for: [s390x] +parts: + my-charm: + plugin: python + source: . + build-environment: + - PARTS_PYTHON_VENV_ARGS: "" diff --git a/tests/spread/smoketests/basic/task.yaml b/tests/spread/smoketests/basic/task.yaml index c4ea172c9..5b9660aa3 100644 --- a/tests/spread/smoketests/basic/task.yaml +++ b/tests/spread/smoketests/basic/task.yaml @@ -5,9 +5,15 @@ priority: 50 # Because these can take a while, run them early. environment: BASE_CHANNEL/focal_bases: 20.04 BASE_CHANNEL/jammy_bases,jammy_platforms: 22.04 - BASE_CHANNEL/noble_platforms: 24.04 + BASE_CHANNEL/noble_platforms,noble_platforms_python: 24.04 CHARM_TYPE/focal_bases,jammy_bases: bases - CHARM_TYPE/jammy_platforms,noble_platforms: platforms + CHARM_TYPE/jammy_platforms,noble_platforms,noble_platforms_python: platforms + BASE_CHANNEL/jammy_bases,jammy_bases_all,jammy_platforms: 22.04 + BASE_CHANNEL/noble_platforms,noble_platforms_all,noble_platforms_python: 24.04 + CHARM_TYPE/focal_bases,jammy_bases,jammy_bases_all: bases + CHARM_TYPE/jammy_platforms,noble_platforms,noble_platforms_all,noble_platforms_python: platforms + EXTRA/jammy_bases_all,noble_platforms_all: "-all" + EXTRA/noble_platforms_python: "-python" # Alma Linux is disabled temporarily: https://github.com/canonical/charmcraft/issues/1496 # BASE_CHANNEL/alma: alma9 # CHARM_TYPE/alma: bases @@ -17,7 +23,7 @@ include: prepare: | charmcraft init --project-dir=charm - cp charmcraft-$CHARM_TYPE-$BASE_CHANNEL.yaml charm/charmcraft.yaml + cp charmcraft-$CHARM_TYPE-$BASE_CHANNEL${EXTRA:+}.yaml charm/charmcraft.yaml restore: | pushd charm @@ -32,4 +38,11 @@ execute: | test -f *.charm unzip -l *.charm | MATCH "src/charm.py" unzip -l *.charm | MATCH "venv/ops/charm.py" + + if [[ ${EXTRA:+} == '-all' ]]; then + if [[ $(unzip -p *.charm manifest.yaml | yq '.bases[].architectures[]' | uniq) != 'all' ]]; then + ERROR "Charm does not have single architecture 'all'." + fi + fi + test ! 
-d build diff --git a/tests/spread/smoketests/parallel-install/charmcraft.yaml b/tests/spread/smoketests/parallel-install/charmcraft.yaml new file mode 100644 index 000000000..e46febc45 --- /dev/null +++ b/tests/spread/smoketests/parallel-install/charmcraft.yaml @@ -0,0 +1,14 @@ +type: charm +name: test-charm +summary: test-charm +description: test-charm + +base: ubuntu@24.04 +platforms: + amd64: + arm64: + riscv64: + +parts: + my-part: + plugin: nil diff --git a/tests/spread/smoketests/parallel-install/task.yaml b/tests/spread/smoketests/parallel-install/task.yaml new file mode 100644 index 000000000..2ab50ff66 --- /dev/null +++ b/tests/spread/smoketests/parallel-install/task.yaml @@ -0,0 +1,31 @@ +summary: pack a charm with parallel-installed charmcraft versions + +# Run last since we change snapd settings +priority: -10 + +prepare: | + snap install yq + if [[ $(snap get system experimental.parallel-instances) != true ]]; then + snap set system experimental.parallel-instances=true + REBOOT + fi + snap install --classic --channel=latest/candidate charmcraft + snap install --dangerous --classic --name=charmcraft_dev /charmcraft/charmcraft_*.snap + +restore: | + if [[ $(snap get system experimental.parallel-instances) == true ]]; then + snap remove charmcraft_dev + snap set system experimental.parallel-instances=false + REBOOT + fi + snap install --classic --dangerous /charmcraft/charmcraft_*.snap + +execute: | + # Check that the candidate version used the correct version + charmcraft pack + [[ $(unzip -p *.charm manifest.yaml | yq .charmcraft-version) == $(charmcraft --version | cut -f2 -d' ') ]] + rm *.charm + + # Try the dev version + charmcraft_dev pack + [[ $(unzip -p *.charm manifest.yaml | yq .charmcraft-version) == $(charmcraft_dev --version | cut -f2 -d' ') ]] diff --git a/tests/spread/store/credentials/task.yaml b/tests/spread/store/credentials/task.yaml index 057d1072a..2c3c22cb0 100644 --- a/tests/spread/store/credentials/task.yaml +++ b/tests/spread/store/credentials/task.yaml @@ -1,19 +1,10 @@ summary: validate that credentials are ok -prepare: | - mkdir charm - cd charm - - cat <<- EOF > charmcraft.yaml # only to configure to use staging - type: bundle - charmhub: - api-url: https://api.staging.charmhub.io - storage-url: https://storage.staging.snapcraftcontent.com - EOF - -restore: | - rm -rf charm +environment: + AUTH_KEY/default: "$(HOST: echo $CHARMCRAFT_AUTH)" + # Test that whoami works even with charm-scoped credentials. 
+ # Regression test for https://github.com/canonical/charmcraft/issues/1869 + AUTH_KEY/charm_scoped: "$(HOST: echo $CHARMCRAFT_SINGLE_CHARM_AUTH)" execute: | - cd charm - charmcraft whoami + CHARMCRAFT_AUTH=${AUTH_KEY} charmcraft whoami diff --git a/tests/spread/store/name-registration/task.yaml b/tests/spread/store/name-registration/task.yaml index f329f5559..bd5d880fe 100644 --- a/tests/spread/store/name-registration/task.yaml +++ b/tests/spread/store/name-registration/task.yaml @@ -33,12 +33,5 @@ execute: | echo $package_type test $package_type == "charm" - charmcraft register-bundle $BUNDLE_NAME - package_type=$(charmcraft names --format=json | jq -r --arg bundle_name $BUNDLE_NAME '.[] | select(.name==$bundle_name) | .type') - echo $package_type - test $package_type == "bundle" - charmcraft unregister $CHARM_NAME - charmcraft unregister $BUNDLE_NAME charmcraft names | NOMATCH $CHARM_NAME - charmcraft names | NOMATCH $BUNDLE_NAME diff --git a/tests/spread/store/resources/task.yaml b/tests/spread/store/resources/task.yaml index c631faf9a..b55af71bf 100644 --- a/tests/spread/store/resources/task.yaml +++ b/tests/spread/store/resources/task.yaml @@ -106,9 +106,9 @@ execute: | test $oci_revision -ge $uploaded_revision # Expected to be newer because the OCI archive is normally slightly different from the Docker file. # Check that skopeo upload-resource works. - # TEMPORARILY DISABLED - # See: https://github.com/canonical/charmcraft/issues/1760 - # charmcraft upload-resource $CHARM_DEFAULT_NAME example-image --image=docker://hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346 + charmcraft upload-resource $CHARM_DEFAULT_NAME example-image --image=docker://hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346 + # Try with a file from the GitHub container registry. + charmcraft upload-resource $CHARM_DEFAULT_NAME example-image --image=docker://ghcr.io/infrastructure-as-code/hello-world@sha256:fe2e9828b415a64d1f65e1c6ddd29f53b26dea5d359c39a3c5ef9c96c1987a45 # release and check full status charmcraft release $CHARM_DEFAULT_NAME -r $last_charm_revno -c edge --resource=example-file:$last_file_revno --resource=example-image:$last_image_revno diff --git a/tests/test_bases.py b/tests/test_bases.py deleted file mode 100644 index be5743154..000000000 --- a/tests/test_bases.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2021 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# For further info, check https://github.com/canonical/charmcraft - - -from unittest.mock import patch - -import pytest - -from charmcraft.bases import check_if_base_matches_host, get_host_as_base -from charmcraft.models.charmcraft import Base -from charmcraft.utils import OSPlatform - - -@pytest.fixture -def mock_get_os_platform(): - os_platform = OSPlatform(system="host-OS", release="host-CHANNEL", machine="host-ARCH") - with patch("charmcraft.bases.get_os_platform", return_value=os_platform) as mock_platform: - yield mock_platform - - -@pytest.fixture -def mock_get_host_architecture(): - with patch( - "craft_application.util.get_host_architecture", return_value="host-ARCH" - ) as mock_host_arch: - yield mock_host_arch - - -def test_get_host_as_base(mock_get_os_platform, mock_get_host_architecture): - assert get_host_as_base() == Base( - name="host-os", - channel="host-CHANNEL", - architectures=["host-ARCH"], - ) - - -def test_check_if_bases_matches_host_matches(mock_get_os_platform, mock_get_host_architecture): - base = Base(name="host-os", channel="host-CHANNEL", architectures=["host-ARCH"]) - assert check_if_base_matches_host(base) == (True, None) - - base = Base( - name="host-os", - channel="host-CHANNEL", - architectures=["other-ARCH", "host-ARCH"], - ) - assert check_if_base_matches_host(base) == (True, None) - - -def test_check_if_bases_matches_host_name_mismatch( - mock_get_os_platform, mock_get_host_architecture -): - base = Base(name="test-other-os", channel="host-CHANNEL", architectures=["host-ARCH"]) - - assert check_if_base_matches_host(base) == ( - False, - "name 'test-other-os' does not match host 'host-os'", - ) - - -def test_check_if_bases_matches_host_channel_mismatch( - mock_get_os_platform, mock_get_host_architecture -): - base = Base(name="host-os", channel="other-CHANNEL", architectures=["host-ARCH"]) - - assert check_if_base_matches_host(base) == ( - False, - "channel 'other-CHANNEL' does not match host 'host-CHANNEL'", - ) - - -def test_check_if_bases_matches_host_arch_mismatch( - mock_get_os_platform, mock_get_host_architecture -): - base = Base( - name="host-os", - channel="host-CHANNEL", - architectures=["other-ARCH", "other-ARCH2"], - ) - - assert check_if_base_matches_host(base) == ( - False, - "host architecture 'host-ARCH' not in base architectures ['other-ARCH', 'other-ARCH2']", - ) diff --git a/tests/test_charm_builder.py b/tests/test_charm_builder.py index 52f3093bc..2c7436f89 100644 --- a/tests/test_charm_builder.py +++ b/tests/test_charm_builder.py @@ -583,7 +583,13 @@ def test_build_dispatcher_classic_hooks_linking_charm_replaced(tmp_path, assert_ @pytest.mark.parametrize( - ("python_packages", "binary_packages", "reqs_contents", "charmlibs", "expected_call_params"), + ( + "python_packages", + "binary_packages", + "reqs_contents", + "charmlibs", + "expected_call_params", + ), [ pytest.param( [], @@ -654,7 +660,14 @@ def test_build_dispatcher_classic_hooks_linking_charm_replaced(tmp_path, assert_ [], [], ["charmlib-dep"], - [["install", "--no-binary=:all:", "--requirement={reqs_file}", "charmlib-dep"]], + [ + [ + "install", + "--no-binary=:all:", + "--requirement={reqs_file}", + "charmlib-dep", + ] + ], id="charmlib-dep-only", ), pytest.param( @@ -685,7 +698,12 @@ def test_build_dispatcher_classic_hooks_linking_charm_replaced(tmp_path, assert_ [ ["install", "bin-pkg1", "duplicate"], ["install", "--no-binary=:all:", "duplicate", "pkg1"], - ["install", "--no-binary=:all:", "--requirement={reqs_file}", "lib-dep"], + [ + "install", + 
"--no-binary=:all:", + "--requirement={reqs_file}", + "lib-dep", + ], ], id="all-overlap", ), @@ -723,23 +741,28 @@ def test_build_dependencies_virtualenv( with patch("shutil.copytree") as mock_copytree: builder.handle_dependencies() - pip_cmd = str(charm_builder._find_venv_bin(tmp_path / const.STAGING_VENV_DIRNAME, "pip")) + pip_cmd = str( + charm_builder._find_venv_bin(tmp_path / const.STAGING_VENV_DIRNAME, "pip") + ) formatted_calls = [ - [param.format(reqs_file=str(reqs_file)) for param in call] for call in expected_call_params + [param.format(reqs_file=str(reqs_file)) for param in call] + for call in expected_call_params ] extra_pip_calls = [call([pip_cmd, *params]) for params in formatted_calls] assert mock.mock_calls == [ call(["python3", "-m", "venv", str(tmp_path / const.STAGING_VENV_DIRNAME)]), - call([pip_cmd, "install", f"pip@{KNOWN_GOOD_PIP_URL}"]), + call([pip_cmd, "install", "--force-reinstall", f"pip@{KNOWN_GOOD_PIP_URL}"]), *extra_pip_calls, ] site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] assert_output("Handling dependencies", "Installing dependencies") @@ -768,10 +791,12 @@ def test_build_dependencies_virtualenv_multiple(tmp_path, assert_output): with patch("shutil.copytree") as mock_copytree: builder.handle_dependencies() - pip_cmd = str(charm_builder._find_venv_bin(tmp_path / const.STAGING_VENV_DIRNAME, "pip")) + pip_cmd = str( + charm_builder._find_venv_bin(tmp_path / const.STAGING_VENV_DIRNAME, "pip") + ) assert mock.mock_calls == [ call(["python3", "-m", "venv", str(tmp_path / const.STAGING_VENV_DIRNAME)]), - call([pip_cmd, "install", f"pip@{KNOWN_GOOD_PIP_URL}"]), + call([pip_cmd, "install", "--force-reinstall", f"pip@{KNOWN_GOOD_PIP_URL}"]), call( [ pip_cmd, @@ -786,7 +811,9 @@ def test_build_dependencies_virtualenv_multiple(tmp_path, assert_output): site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] assert_output("Handling dependencies", "Installing dependencies") @@ -846,7 +873,9 @@ def test_build_dependencies_no_reused_missing_venv(tmp_path, assert_output): site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # remove the site venv directory staging_venv_dir.rmdir() @@ -867,7 +896,9 @@ def test_build_dependencies_no_reused_missing_venv(tmp_path, assert_output): site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] def test_build_dependencies_no_reused_missing_hash_file(tmp_path, assert_output): @@ -905,7 +936,9 @@ def test_build_dependencies_no_reused_missing_hash_file(tmp_path, assert_output) site_packages_dir = charm_builder._find_venv_site_packages( 
pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # remove the hash file (tmp_path / const.DEPENDENCIES_HASH_FILENAME).unlink() @@ -926,7 +959,9 @@ def test_build_dependencies_no_reused_missing_hash_file(tmp_path, assert_output) site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] def test_build_dependencies_no_reused_problematic_hash_file(tmp_path, assert_output): @@ -964,10 +999,14 @@ def test_build_dependencies_no_reused_problematic_hash_file(tmp_path, assert_out site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # avoid the file to be read successfully - (tmp_path / const.DEPENDENCIES_HASH_FILENAME).write_bytes(b"\xc3\x28") # invalid UTF8 + (tmp_path / const.DEPENDENCIES_HASH_FILENAME).write_bytes( + b"\xc3\x28" + ) # invalid UTF8 # second run! with patch("shutil.copytree") as mock_copytree: @@ -986,7 +1025,9 @@ def test_build_dependencies_no_reused_problematic_hash_file(tmp_path, assert_out site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] @pytest.mark.parametrize( @@ -1049,7 +1090,9 @@ def test_build_dependencies_no_reused_different_dependencies( site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # for the second call, default new dependencies to first ones so only one is changed at a time if new_reqs_content is not None: @@ -1076,7 +1119,9 @@ def test_build_dependencies_no_reused_different_dependencies( site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] def test_build_dependencies_reused(tmp_path, assert_output): @@ -1118,7 +1163,9 @@ def test_build_dependencies_reused(tmp_path, assert_output): site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # second run! 
with patch("shutil.copytree") as mock_copytree: @@ -1133,7 +1180,9 @@ def test_build_dependencies_reused(tmp_path, assert_output): site_packages_dir = charm_builder._find_venv_site_packages( pathlib.Path(const.STAGING_VENV_DIRNAME) ) - assert mock_copytree.mock_calls == [call(site_packages_dir, build_dir / const.VENV_DIRNAME)] + assert mock_copytree.mock_calls == [ + call(site_packages_dir, build_dir / const.VENV_DIRNAME) + ] # -- tests about juju ignore @@ -1192,8 +1241,12 @@ def mock_build_charm(self): fake_argv = ["cmd", "--builddir", "builddir", "--installdir", "installdir"] with patch.object(sys, "argv", fake_argv): - with patch("charmcraft.charm_builder.CharmBuilder.build_charm", new=mock_build_charm): - with patch("charmcraft.charm_builder.collect_charmlib_pydeps") as mock_collect_pydeps: + with patch( + "charmcraft.charm_builder.CharmBuilder.build_charm", new=mock_build_charm + ): + with patch( + "charmcraft.charm_builder.collect_charmlib_pydeps" + ) as mock_collect_pydeps: with pytest.raises(SystemExit) as raised: charm_builder.main() assert raised.value.code == 42 @@ -1216,8 +1269,12 @@ def mock_build_charm(self): fake_argv = ["cmd", "--builddir", "builddir", "--installdir", "installdir"] fake_argv += ["-rreqs1.txt", "--requirement", "reqs2.txt"] with patch.object(sys, "argv", fake_argv): - with patch("charmcraft.charm_builder.CharmBuilder.build_charm", new=mock_build_charm): - with patch("charmcraft.charm_builder.collect_charmlib_pydeps") as mock_collect_pydeps: + with patch( + "charmcraft.charm_builder.CharmBuilder.build_charm", new=mock_build_charm + ): + with patch( + "charmcraft.charm_builder.collect_charmlib_pydeps" + ) as mock_collect_pydeps: with pytest.raises(SystemExit) as raised: charm_builder.main() assert raised.value.code == 42 diff --git a/tests/test_infra.py b/tests/test_infra.py index 0a1264c6e..56f26eca4 100644 --- a/tests/test_infra.py +++ b/tests/test_infra.py @@ -55,5 +55,7 @@ def test_ensure_copyright(): else: issues.append(filepath) if issues: - msg = "Please add copyright headers to the following files:\n" + "\n".join(issues) + msg = "Please add copyright headers to the following files:\n" + "\n".join( + issues + ) pytest.fail(msg, pytrace=False) diff --git a/tests/test_instrum.py b/tests/test_instrum.py index b1602857b..571414168 100644 --- a/tests/test_instrum.py +++ b/tests/test_instrum.py @@ -97,7 +97,10 @@ def test_measurement_extra_info_complex(): weird_object = object() mid = measurements.start("test msg", {"foo": 42, "bar": weird_object}) - assert measurements.measurements[mid]["extra"] == {"foo": "42", "bar": str(weird_object)} + assert measurements.measurements[mid]["extra"] == { + "foo": "42", + "bar": str(weird_object), + } def test_measurement_overlapped_measurements(): @@ -224,7 +227,9 @@ def test_measurement_merge_complex(tmp_path, fake_times): # merge from it and check merged structure measurements_outer.merge_from(measures_filepath) merged_1 = measurements_outer.measurements[mid_inner_1] - assert merged_1["parent"] == mid_outer_2 # the parent is the "current" outer measure + assert ( + merged_1["parent"] == mid_outer_2 + ) # the parent is the "current" outer measure assert merged_1["tstart"] == 25 # back to absolute assert merged_1["tend"] == 55 # back to absolute merged_2 = measurements_outer.measurements[mid_inner_2] @@ -232,7 +237,9 @@ def test_measurement_merge_complex(tmp_path, fake_times): assert merged_2["tstart"] == 35 # back to absolute assert merged_2["tend"] == 45 # back to absolute merged_3 = 
measurements_outer.measurements[mid_inner_3] - assert merged_3["parent"] == mid_outer_2 # the parent is the "current" outer measure + assert ( + merged_3["parent"] == mid_outer_2 + ) # the parent is the "current" outer measure assert merged_3["tstart"] == 65 # back to absolute assert merged_3["tend"] == 75 # back to absolute diff --git a/tests/test_linters.py b/tests/test_linters.py index 6d40ca095..3fb5296d0 100644 --- a/tests/test_linters.py +++ b/tests/test_linters.py @@ -33,6 +33,7 @@ JujuMetadata, Language, NamingConventions, + OpsMainCall, check_dispatch_with_python_entrypoint, get_entrypoint_from_dispatch, ) @@ -75,7 +76,7 @@ def test_epfromdispatch_inaccessible_dispatch(tmp_path): def test_epfromdispatch_broken_dispatch(tmp_path): """The charm has a dispatch which we can't decode.""" dispatch = tmp_path / const.DISPATCH_FILENAME - dispatch.write_bytes(b"\xC0\xC0") + dispatch.write_bytes(b"\xc0\xc0") result = get_entrypoint_from_dispatch(tmp_path) assert result is None @@ -92,7 +93,9 @@ def test_checkdispatchpython_python_ok(tmp_path): """The charm is written in Python.""" entrypoint = tmp_path / "charm.py" entrypoint.touch(mode=0o700) - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = check_dispatch_with_python_entrypoint(tmp_path) assert result == entrypoint @@ -100,7 +103,9 @@ def test_checkdispatchpython_python_ok(tmp_path): def test_checkdispatchpython_no_entrypoint(tmp_path): """Cannot find the entrypoint used in dispatch.""" entrypoint = tmp_path / "charm.py" - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = check_dispatch_with_python_entrypoint(tmp_path) assert result is None @@ -123,7 +128,9 @@ def test_checkdispatchpython_entrypoint_is_not_python(tmp_path): ) entrypoint = tmp_path / "charm" entrypoint.touch(mode=0o700) - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = check_dispatch_with_python_entrypoint(tmp_path) assert result is None @@ -135,7 +142,9 @@ def test_checkdispatchpython_entrypoint_no_exec(tmp_path): dispatch.write_text(EXAMPLE_DISPATCH) entrypoint = tmp_path / "charm.py" entrypoint.touch() - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = check_dispatch_with_python_entrypoint(tmp_path) assert result is None @@ -145,7 +154,9 @@ def test_checkdispatchpython_entrypoint_no_exec(tmp_path): def test_language_python(): """The charm is written in Python.""" - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path("entrypoint") result = Language().run(pathlib.Path("somedir")) assert result == Language.Result.PYTHON @@ -154,7 +165,9 @@ def test_language_python(): def test_language_no_dispatch(tmp_path): """The charm has no dispatch at all.""" - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: 
mock_check.return_value = None result = Language().run(pathlib.Path("somedir")) assert result == Language.Result.UNKNOWN @@ -213,7 +226,9 @@ def test_framework_operator_used_ok(tmp_path, import_line): opsdir.mkdir(parents=True) # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is True @@ -231,7 +246,9 @@ def test_framework_operator_language_not_python(tmp_path): opsdir.mkdir(parents=True) # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = None result = Framework()._check_operator(tmp_path) assert result is False @@ -244,7 +261,9 @@ def test_framework_operator_venv_directory_missing(tmp_path): entrypoint.write_text("import ops") # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is False @@ -261,7 +280,9 @@ def test_framework_operator_no_venv_ops_directory(tmp_path): venvdir.mkdir() # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is False @@ -279,7 +300,9 @@ def test_framework_operator_venv_ops_directory_is_not_a_dir(tmp_path): opsfile.touch() # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is False @@ -296,7 +319,9 @@ def test_framework_operator_corrupted_entrypoint(tmp_path): opsdir.mkdir(parents=True) # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is False @@ -322,7 +347,9 @@ def test_framework_operator_no_ops_imported(tmp_path, monkeypatch, import_line): opsdir.mkdir(parents=True) # check - with patch("charmcraft.linters.check_dispatch_with_python_entrypoint") as mock_check: + with patch( + "charmcraft.linters.check_dispatch_with_python_entrypoint" + ) as mock_check: mock_check.return_value = pathlib.Path(entrypoint) result = Framework()._check_operator(tmp_path) assert result is False @@ -843,7 +870,10 @@ def test_jujuconfig_no_type_in_options_items(tmp_path): linter = JujuConfig() result = linter.run(tmp_path) assert result == JujuConfig.Result.ERROR - assert linter.text == "Error in config.yaml: items under 'options' must have a 'type' key." + assert ( + linter.text + == "Error in config.yaml: items under 'options' must have a 'type' key." 
+ ) @pytest.mark.parametrize( @@ -945,7 +975,9 @@ def test_entrypoint_missing(tmp_path): """The file does not exist.""" entrypoint = tmp_path / "charm" linter = Entrypoint() - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = linter.run(tmp_path) assert result == Entrypoint.Result.ERROR assert linter.text == f"Cannot find the entrypoint file: {str(entrypoint)!r}" @@ -956,7 +988,9 @@ def test_entrypoint_directory(tmp_path): entrypoint = tmp_path / "charm" entrypoint.mkdir() linter = Entrypoint() - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = linter.run(tmp_path) assert result == Entrypoint.Result.ERROR assert linter.text == f"The entrypoint is not a file: {str(entrypoint)!r}" @@ -968,7 +1002,9 @@ def test_entrypoint_non_exec(tmp_path): entrypoint = tmp_path / "charm" entrypoint.touch() linter = Entrypoint() - with patch("charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint): + with patch( + "charmcraft.linters.get_entrypoint_from_dispatch", return_value=entrypoint + ): result = linter.run(tmp_path) assert result == Entrypoint.Result.ERROR assert linter.text == f"The entrypoint file is not executable: {str(entrypoint)!r}" @@ -1029,7 +1065,10 @@ def test_additional_files_checker_not_applicable(tmp_path): result = linter.run(prime_dir) assert result == LintResult.NONAPPLICABLE - assert linter.text == "Additional files check not applicable without a build environment." + assert ( + linter.text + == "Additional files check not applicable without a build environment." + ) @pytest.mark.parametrize( @@ -1066,3 +1105,149 @@ def test_additional_files_checker_generated_ignore(tmp_path, file): assert result == LintResult.OK assert linter.text == "No additional files found in the charm." 
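The CODE_SAMPLES and NEGATIVE_CODE_SAMPLES added below feed OpsMainCall._check_main_calls, the linter check that a charm's entrypoint really calls ops.main(). The samples suggest the check resolves import aliases before looking for the call; one way to do that kind of check (a deliberately simplified sketch, not Charmcraft's implementation) is to walk the module's AST:

import ast


def calls_ops_main(code: str) -> bool:
    # Simplified illustration; not Charmcraft's OpsMainCall implementation.
    # Handles `import ops` / `ops.main(...)` and `from ops import main` /
    # `main(...)`; the real linter also covers aliases, submodule imports and
    # the legacy `ops.main.main(...)` form.
    tree = ast.parse(code)
    ops_names: set[str] = set()   # names bound to the ops package
    main_names: set[str] = set()  # names bound directly to ops.main
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                if alias.name == "ops" or alias.name.startswith("ops."):
                    ops_names.add(alias.asname or "ops")
        elif isinstance(node, ast.ImportFrom) and node.module == "ops":
            for alias in node.names:
                if alias.name == "main":
                    main_names.add(alias.asname or "main")
    for node in ast.walk(tree):
        if not isinstance(node, ast.Call):
            continue
        func = node.func
        if isinstance(func, ast.Name) and func.id in main_names:
            return True
        if (
            isinstance(func, ast.Attribute)
            and func.attr == "main"
            and isinstance(func.value, ast.Name)
            and func.value.id in ops_names
        ):
            return True
    return False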
+ + +CODE_SAMPLES = { + "canonical example": dedent( + """ + import ops + if __name__ == "__main__": + ops.main(SomeCharm) + """ + ), + "recommended import style": dedent( + """ + import ops + ops.main(SomeCharm) + """ + ), + "recommended import style, legacy call": dedent( + """ + import ops + ops.main.main(SomeCharm) + """ + ), + "call with kwarg": dedent( + """ + import ops + ops.main(charm_class=SomeCharm) + """ + ), + "import side effect": dedent( + """ + import ops.charm # makes `ops` visible + ops.main(SomeCharm) + """ + ), + "import alias": dedent( + """ + import ops as mops + mops.main(SomeCharm) + """ + ), + "function import": dedent( + """ + from ops import main + main(SomeCharm) + """ + ), + "function import, legacy call": dedent( + """ + from ops import main + main.main(SomeCharm) + """ + ), + "submodule import": dedent( + """ + import ops.main + ops.main(SomeCharm) # type: ignore + """ + ), + "submodule import, legacy call": dedent( + """ + import ops.main + ops.main.main(SomeCharm) + """ + ), + "multiple imports, simple": dedent( + """ + import ops + import ops.main + ops.main(SomeCharm) + """ + ), + "multiple imports, earlier": dedent( + """ + import ops + from ops.main import main + ops.main(SomeCharm) + """ + ), + "multiple imports, latter": dedent( + """ + import ops + from ops.main import main + main(SomeCharm) + """ + ), + "function import from submodule": dedent( + """ + from ops.main import main + main(SomeCharm) + """ + ), + "function alias import from submodule": dedent( + """ + from ops.main import main as alias + alias(SomeCharm) + """ + ), +} + + +@pytest.mark.parametrize( + "code", + [pytest.param(v, id=k) for k, v in CODE_SAMPLES.items()], +) +def test_ops_main(code: str): + assert OpsMainCall()._check_main_calls(code) + + +NEGATIVE_CODE_SAMPLES = { + "missing ops import": dedent( + """ + ops.main(SomeCharm) + """ + ), + "missing main call": dedent( + """ + import ops + """ + ), + "wrong import alias": dedent( + """ + import ops as oops + ops.main(SomeCharm) + """ + ), + "no side effect from an alias": dedent( + """ + import ops.charm as the_charm + ops.main(SomeCharm) + """ + ), + "wrong function alias import from submodule": dedent( + """ + from ops.main import main as whatchamacallit + main(SomeCharm) + """ + ), +} + + +@pytest.mark.parametrize( + "code", + [pytest.param(v, id=k) for k, v in NEGATIVE_CODE_SAMPLES.items()], +) +def test_ops_main_negative(code: str): + assert not OpsMainCall()._check_main_calls(code) diff --git a/tests/test_parts.py b/tests/test_parts.py index 55fe754fe..c6ba2a318 100644 --- a/tests/test_parts.py +++ b/tests/test_parts.py @@ -21,7 +21,9 @@ from charmcraft import parts -pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") +pytestmark = pytest.mark.skipif( + sys.platform == "win32", reason="Windows not [yet] supported" +) # -- tests for part config processing diff --git a/tests/test_snap.py b/tests/test_snap.py index 8ed617a19..7cdbe9703 100644 --- a/tests/test_snap.py +++ b/tests/test_snap.py @@ -29,7 +29,9 @@ @pytest.fixture def mock_snap_config(): - with mock.patch("charmcraft.snap.snaphelpers.SnapConfig", autospec=True) as mock_snap_config: + with mock.patch( + "charmcraft.snap.snaphelpers.SnapConfig", autospec=True + ) as mock_snap_config: yield mock_snap_config @@ -72,5 +74,7 @@ def fake_get(key: str): assert snap_config == CharmcraftSnapConfiguration(provider=provider) assert snap_config.provider == provider - with pytest.raises(ValueError, match=re.escape(f"provider {provider!r} 
is not supported")): + with pytest.raises( + ValueError, match=re.escape(f"provider {provider!r} is not supported") + ): validate_snap_configuration(snap_config) diff --git a/tests/unit/commands/test_lifecycle.py b/tests/unit/commands/test_lifecycle.py index 04a210faa..2fc1975f7 100644 --- a/tests/unit/commands/test_lifecycle.py +++ b/tests/unit/commands/test_lifecycle.py @@ -14,14 +14,18 @@ # # For further info, check https://github.com/canonical/charmcraft """Unit tests for lifecycle commands.""" + import argparse import pathlib import craft_cli import pytest +import pytest_check +from craft_cli.pytest_plugin import RecordingEmitter -from charmcraft import application, services +from charmcraft import application, models, services, utils from charmcraft.application.commands import lifecycle +from charmcraft.store.models import Library def get_namespace( @@ -60,7 +64,9 @@ def get_namespace( @pytest.fixture def pack(service_factory: services.ServiceFactory) -> lifecycle.PackCommand: - return lifecycle.PackCommand({"app": application.APP_METADATA, "services": service_factory}) + return lifecycle.PackCommand( + {"app": application.APP_METADATA, "services": service_factory} + ) @pytest.mark.parametrize( @@ -119,3 +125,76 @@ def test_pack_invalid_arguments( pack.run(command_args) assert exc_info.value.args[0].startswith(message_start) + + +def test_pack_update_charm_libs_empty( + fake_project_dir: pathlib.Path, + pack: lifecycle.PackCommand, + simple_charm, + emitter: RecordingEmitter, + service_factory: services.ServiceFactory, +): + simple_charm.charm_libs = [models.CharmLib(lib="my_charm.my_lib", version="0.1")] + store_lib = Library("lib_id", "my_lib", "my_charm", 0, 1, "Lib contents", "hash") + service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = [ + store_lib + ] + service_factory.store.anonymous_client.get_library.return_value = store_lib + + pack._update_charm_libs() + + with pytest_check.check(): + emitter.assert_debug(repr(store_lib)) + + path = fake_project_dir / utils.get_lib_path("my_charm", "my_lib", 0) + assert path.read_text() == "Lib contents" + + +def test_pack_update_charm_libs_no_update( + fake_project_dir: pathlib.Path, + pack: lifecycle.PackCommand, + simple_charm, + emitter: RecordingEmitter, + service_factory: services.ServiceFactory, +): + simple_charm.charm_libs = [models.CharmLib(lib="my_charm.my_lib", version="0.1")] + store_lib = Library("lib_id", "my_lib", "my_charm", 0, 1, "Lib contents", "hash") + path = fake_project_dir / utils.get_lib_path("my_charm", "my_lib", 0) + path.parent.mkdir(parents=True) + path.write_text("LIBID='id'\nLIBAPI=0\nLIBPATCH=1") + service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = [ + store_lib + ] + service_factory.store.anonymous_client.get_library.return_value = store_lib + + pack._update_charm_libs() + + with pytest.raises(AssertionError): + emitter.assert_debug(repr(store_lib)) + + assert path.read_text() != "Lib contents" + + +def test_pack_update_charm_libs_needs_update( + fake_project_dir: pathlib.Path, + pack: lifecycle.PackCommand, + simple_charm, + emitter: RecordingEmitter, + service_factory: services.ServiceFactory, +): + simple_charm.charm_libs = [models.CharmLib(lib="my_charm.my_lib", version="0.2")] + store_lib = Library("lib_id", "my_lib", "my_charm", 0, 2, "Lib contents", "hash") + path = fake_project_dir / utils.get_lib_path("my_charm", "my_lib", 0) + path.parent.mkdir(parents=True) + path.write_text("LIBID='id'\nLIBAPI=0\nLIBPATCH=1") + 
service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = [
+        store_lib
+    ]
+    service_factory.store.anonymous_client.get_library.return_value = store_lib
+
+    pack._update_charm_libs()
+
+    with pytest_check.check():
+        emitter.assert_debug(repr(store_lib))
+
+    assert path.read_text() == "Lib contents"
diff --git a/tests/unit/commands/test_store.py b/tests/unit/commands/test_store.py
index 774b3b376..9d68d4bfd 100644
--- a/tests/unit/commands/test_store.py
+++ b/tests/unit/commands/test_store.py
@@ -14,6 +14,7 @@
 #
 # For further info, check https://github.com/canonical/charmcraft
 """Unit tests for store commands."""
+
 import argparse
 import datetime
 import pathlib
@@ -22,10 +23,12 @@
 
 import craft_cli.pytest_plugin
 import craft_store
+import freezegun
 import pytest
 from craft_store import models
 
 from charmcraft import errors, store
+from charmcraft.application import commands
 from charmcraft.application.commands import SetResourceArchitecturesCommand
 from charmcraft.application.commands.store import FetchLibs, LoginCommand
 from charmcraft.application.main import APP_METADATA
@@ -59,7 +62,14 @@ def test_login_basic_no_export(service_factory, mock_store_client):
 @pytest.mark.parametrize("permission", [None, [], ["package-manage"]])
 @pytest.mark.parametrize("ttl", [None, 0, 2**65])
 def test_login_export(
-    monkeypatch, service_factory, mock_store_client, charm, bundle, channel, permission, ttl
+    monkeypatch,
+    service_factory,
+    mock_store_client,
+    charm,
+    bundle,
+    channel,
+    permission,
+    ttl,
 ):
     mock_client_cls = mock.Mock(return_value=mock_store_client)
     monkeypatch.setattr(craft_store, "StoreClient", mock_client_cls)
@@ -93,7 +103,13 @@ def test_login_export(
                     bases=[models.ResponseCharmResourceBase()],
                 )
             ],
-            [{"revision": 123, "updated_at": "1900-01-01T00:00:00", "architectures": ["all"]}],
+            [
+                {
+                    "revision": 123,
+                    "updated_at": "1900-01-01T00:00:00",
+                    "architectures": ["all"],
+                }
+            ],
         ),
     ],
 )
@@ -211,9 +227,13 @@ def test_fetch_libs_missing_from_store(service_factory, libs, expected):
         ),
     ],
 )
-def test_fetch_libs_no_content(new_path, service_factory, libs, store_libs, dl_lib, expected):
+def test_fetch_libs_no_content(
+    new_path, service_factory, libs, store_libs, dl_lib, expected
+):
     service_factory.project.charm_libs = libs
-    service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = store_libs
+    service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = (
+        store_libs
+    )
     service_factory.store.anonymous_client.get_library.return_value = dl_lib
     fetch_libs = FetchLibs({"app": APP_METADATA, "services": service_factory})
 
@@ -256,7 +276,9 @@ def test_fetch_libs_success(
     new_path, emitter, service_factory, libs, store_libs, dl_lib, expected
 ) -> None:
     service_factory.project.charm_libs = libs
-    service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = store_libs
+    service_factory.store.anonymous_client.fetch_libraries_metadata.return_value = (
+        store_libs
+    )
     service_factory.store.anonymous_client.get_library.return_value = dl_lib
     fetch_libs = FetchLibs({"app": APP_METADATA, "services": service_factory})
 
@@ -264,3 +286,37 @@ def test_fetch_libs_success(
 
     emitter.assert_progress("Getting library metadata from charmhub")
     emitter.assert_message("Downloaded 1 charm libraries.")
+
+
+@freezegun.freeze_time("2024-10-31")
+def test_register_bundle_warning(monkeypatch: pytest.MonkeyPatch, emitter):
+    mock_store = mock.Mock()
+    monkeypatch.setattr("charmcraft.application.commands.store.Store", 
mock_store) + + parsed_args = argparse.Namespace(name="name") + cmd = commands.RegisterBundleNameCommand(None) + cmd.run(parsed_args) + + emitter.assert_progress( + "\u001b[31mWARNING:\u001b[0m New bundle registration will stop working on 2024-11-01. For " + f"more information, see: {commands.store.BUNDLE_REGISTRATION_REMOVAL_URL}", + permanent=True, + ) + mock_store.assert_called() + + +@freezegun.freeze_time("2024-11-01") +def test_register_bundle_error(monkeypatch: pytest.MonkeyPatch, emitter): + mock_store = mock.Mock() + monkeypatch.setattr("charmcraft.application.commands.store.Store", mock_store) + + parsed_args = argparse.Namespace(name="name") + cmd = commands.RegisterBundleNameCommand(None) + + assert cmd.run(parsed_args) == 1 + + emitter.assert_message( + "\u001b[31mERROR:\u001b[0m New bundle registration is discontinued as of 2024-11-01. For " + f"more information, see: {commands.store.BUNDLE_REGISTRATION_REMOVAL_URL}", + ) + mock_store.assert_not_called() diff --git a/tests/unit/models/test_charmcraft.py b/tests/unit/models/test_charmcraft.py index 719067198..a0a40b17b 100644 --- a/tests/unit/models/test_charmcraft.py +++ b/tests/unit/models/test_charmcraft.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for Charmcraft models.""" + import pytest from charmcraft.models import charmcraft @@ -22,9 +23,18 @@ @pytest.mark.parametrize( ("base_str", "expected"), [ - ("ubuntu@24.04", charmcraft.Base(name="ubuntu", channel="24.04", architectures=[])), - ("ubuntu@22.04", charmcraft.Base(name="ubuntu", channel="22.04", architectures=[])), - ("almalinux@9", charmcraft.Base(name="almalinux", channel="9", architectures=[])), + ( + "ubuntu@24.04", + charmcraft.Base(name="ubuntu", channel="24.04", architectures=[]), + ), + ( + "ubuntu@22.04", + charmcraft.Base(name="ubuntu", channel="22.04", architectures=[]), + ), + ( + "almalinux@9", + charmcraft.Base(name="almalinux", channel="9", architectures=[]), + ), ], ) def test_get_base_from_str_and_arch(base_str, expected): diff --git a/tests/unit/models/test_config.py b/tests/unit/models/test_config.py index e929d848e..689aac06c 100644 --- a/tests/unit/models/test_config.py +++ b/tests/unit/models/test_config.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for the config model.""" + import math import pydantic @@ -36,7 +37,10 @@ { "favourite integer": {"type": "int"}, "favourite number": {"type": "float", "default": math.pi}, - "catchphrase": {"type": "string", "description": "What's your catchphrase?"}, + "catchphrase": { + "type": "string", + "description": "What's your catchphrase?", + }, "default_answer": { "type": "boolean", "description": "Yes/no true or false", @@ -46,7 +50,9 @@ ], ) def test_valid_config(options): - assert JujuConfig.model_validate({"options": options}) == JujuConfig(options=options) + assert JujuConfig.model_validate({"options": options}) == JujuConfig( + options=options + ) def test_empty_config(): diff --git a/tests/unit/models/test_metadata.py b/tests/unit/models/test_metadata.py index 63fdeaa50..1a2fbe608 100644 --- a/tests/unit/models/test_metadata.py +++ b/tests/unit/models/test_metadata.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for metadata models.""" + import json import pytest @@ -82,7 +83,10 @@ def test_charm_metadata_from_charm_success(charm_dict, expected): charm = project.CharmcraftProject.unmarshal(charm_dict) - assert 
json.loads(json.dumps(metadata.CharmMetadata.from_charm(charm).marshal())) == expected + assert ( + json.loads(json.dumps(metadata.CharmMetadata.from_charm(charm).marshal())) + == expected + ) @pytest.mark.parametrize( @@ -95,5 +99,6 @@ def test_bundle_metadata_from_bundle(bundle_dict, expected): bundle = project.Bundle.unmarshal(BASIC_BUNDLE_DICT) assert ( - json.loads(json.dumps(metadata.BundleMetadata.from_bundle(bundle).marshal())) == expected + json.loads(json.dumps(metadata.BundleMetadata.from_bundle(bundle).marshal())) + == expected ) diff --git a/tests/unit/models/test_project.py b/tests/unit/models/test_project.py index bf9326a99..045111cf0 100644 --- a/tests/unit/models/test_project.py +++ b/tests/unit/models/test_project.py @@ -22,6 +22,7 @@ from textwrap import dedent from typing import Any +import craft_cli.pytest_plugin import hypothesis import pydantic import pyfakefs.fake_filesystem @@ -40,7 +41,9 @@ SIMPLE_BASE = Base(name="simple", channel="0.0") BASE_WITH_ONE_ARCH = Base(name="arch", channel="1.0", architectures=["amd64"]) -BASE_WITH_MULTIARCH = Base(name="multiarch", channel="2.0", architectures=["arm64", "riscv64"]) +BASE_WITH_MULTIARCH = Base( + name="multiarch", channel="2.0", architectures=["arm64", "riscv64"] +) SIMPLE_BASENAME = bases.BaseName("simple", "0.0") ONE_ARCH_BASENAME = bases.BaseName("arch", "1.0") MULTIARCH_BASENAME = bases.BaseName("multiarch", "2.0") @@ -92,7 +95,26 @@ channel: "22.04" architectures: [arm64] """ -SIMPLE_METADATA_YAML = "{name: charmy-mccharmface, summary: Charmy!, description: Very charming!}" +MINIMAL_CHARMCRAFT_DICT = { + "type": "charm", + "bases": [ + { + "build-on": [ + { + "name": "ubuntu", + "channel": "22.04", + "architectures": [util.get_host_architecture()], + }, + ], + "run-on": [ + {"name": "ubuntu", "channel": "22.04", "architectures": ["arm64"]}, + ], + } + ], +} +SIMPLE_METADATA_YAML = ( + "{name: charmy-mccharmface, summary: Charmy!, description: Very charming!}" +) SIMPLE_CHARMCRAFT_YAML = f"""\ type: charm name: charmy-mccharmface @@ -108,12 +130,60 @@ channel: "22.04" architectures: [arm64] """ -SIMPLE_CONFIG_YAML = "options: {admin: {default: root, description: Admin user, type: string}}" +SIMPLE_CHARMCRAFT_DICT = MINIMAL_CHARMCRAFT_DICT | { + "name": "charmy-mccharmface", + "summary": "Charmy!", + "description": "Very charming!", +} +SIMPLE_CONFIG_YAML = ( + "options: {admin: {default: root, description: Admin user, type: string}}" +) SIMPLE_CONFIG_DICT = { - "options": {"admin": {"type": "string", "default": "root", "description": "Admin user"}} + "options": { + "admin": {"type": "string", "default": "root", "description": "Admin user"} + } } SIMPLE_ACTIONS_YAML = "snooze: {description: Take a little nap.}" SIMPLE_ACTIONS_DICT = {"snooze": {"description": "Take a little nap."}} +CHARMCRAFT_YAML_NON_VECTORISED_PLATFORMS = """\ +type: charm +name: test-1874-regression +summary: Regression test for #1874 +description: A charm for regression testing https://github.com/canonical/charmcraft/issues/1874 + +parts: + charm: + plugin: dump + source: . 
+ prime: + - actions/* + - files/* + - hooks/* + - lib/* + - templates/* + - config.yaml + - copyright + - icon.svg + - LICENSE + - Makefile + - metadata.yaml + - README.md + +base: ubuntu@24.04 +platforms: + amd64: + build-on: amd64 + build-for: amd64 + arm64: + build-on: arm64 + build-for: arm64 + ppc64el: + build-on: ppc64el + build-for: ppc64el + s390x: + build-on: s390x + build-for: s390x +""" # region CharmPlatform tests @@ -186,13 +256,17 @@ def test_platform_from_multiple_bases(bases, expected): *( list(x) for x in itertools.combinations(const.CharmArch, 1) ), # A single architecture in a list - *(list(x) for x in itertools.combinations(const.CharmArch, 2)), # Two architectures in a list + *( + list(x) for x in itertools.combinations(const.CharmArch, 2) + ), # Two architectures in a list ] # endregion # region CharmBuildInfo tests -@pytest.mark.parametrize("build_on_base", [SIMPLE_BASE, BASE_WITH_ONE_ARCH, BASE_WITH_MULTIARCH]) +@pytest.mark.parametrize( + "build_on_base", [SIMPLE_BASE, BASE_WITH_ONE_ARCH, BASE_WITH_MULTIARCH] +) @pytest.mark.parametrize("build_on_arch", ["amd64", "arm64", "riscv64", "s390x"]) @pytest.mark.parametrize("run_on", [SIMPLE_BASE, BASE_WITH_ONE_ARCH]) def test_build_info_from_build_on_run_on_basic( @@ -222,7 +296,9 @@ def test_build_info_from_build_on_run_on_basic( ], ) @pytest.mark.parametrize("lib_version", ["0", "1", "2.0", "2.1", "3.14"]) -def test_create_valid_charm_lib(lib_name: str, expected_lib_name: str, lib_version: str): +def test_create_valid_charm_lib( + lib_name: str, expected_lib_name: str, lib_version: str +): lib = project.CharmLib.unmarshal({"lib": lib_name, "version": lib_version}) assert lib.lib == expected_lib_name @@ -334,7 +410,9 @@ def test_build_info_from_build_on_run_on_multi_arch(run_on, expected): ], ) def test_build_info_generator(given, expected): - assert list(project.CharmBuildInfo.gen_from_bases_configurations(*given)) == expected + assert ( + list(project.CharmBuildInfo.gen_from_bases_configurations(*given)) == expected + ) # endregion @@ -452,7 +530,9 @@ def test_build_info_generator(given, expected): platform=f"ubuntu-22.04-{util.get_host_architecture()}", build_on=util.get_host_architecture(), build_for=util.get_host_architecture(), - build_for_bases=[project.charmcraft.Base(name="ubuntu", channel="22.04")], + build_for_bases=[ + project.charmcraft.Base(name="ubuntu", channel="22.04") + ], build_on_index=0, base=bases.BaseName("ubuntu", "22.04"), bases_index=0, @@ -461,7 +541,11 @@ def test_build_info_generator(given, expected): id="basic-bases", ), pytest.param( - {"bases": [{"build-on": [BASE_WITH_ONE_ARCH], "run-on": [BASE_WITH_ONE_ARCH]}]}, + { + "bases": [ + {"build-on": [BASE_WITH_ONE_ARCH], "run-on": [BASE_WITH_ONE_ARCH]} + ] + }, [ project.CharmBuildInfo( platform="arch-1.0-amd64", @@ -503,7 +587,9 @@ def test_build_planner_correct(data, expected): }, ], ) -def test_build_planner_platforms_combinations(base, build_base, build_plan_basename, platforms): +def test_build_planner_platforms_combinations( + base, build_base, build_plan_basename, platforms +): """Test that we're able to create a valid platform for each of these combinations.""" planner = project.CharmcraftBuildPlanner( base=base, @@ -519,12 +605,18 @@ def test_build_planner_platforms_combinations(base, build_base, build_plan_basen @pytest.mark.parametrize("architecture", sorted(const.SUPPORTED_ARCHITECTURES)) @pytest.mark.parametrize("system", ["ubuntu", "linux", "macos", "windows", "plan9"]) -@pytest.mark.parametrize("release", ["22.04", "2.6.32", 
"10.5", "vista", "from bell labs"]) +@pytest.mark.parametrize( + "release", ["22.04", "2.6.32", "10.5", "vista", "from bell labs"] +) def test_get_bundle_plan(mocker, architecture, release, system): - mocker.patch("craft_application.util.get_host_architecture", return_value=architecture) + mocker.patch( + "craft_application.util.get_host_architecture", return_value=architecture + ) mocker.patch( "charmcraft.utils.get_os_platform", - return_value=utils.OSPlatform(machine=architecture, system=system, release=release), + return_value=utils.OSPlatform( + machine=architecture, system=system, release=release + ), ) planner = project.CharmcraftBuildPlanner(type="bundle") @@ -575,7 +667,7 @@ def test_unmarshal_invalid_type(type_): "metadata_yaml", "config_yaml", "actions_yaml", - "expected_diff", + "expected_dict", ), [ ( @@ -583,35 +675,38 @@ def test_unmarshal_invalid_type(type_): None, None, None, - {"parts": BASIC_CHARM_PARTS}, + SIMPLE_CHARMCRAFT_DICT | {"parts": BASIC_CHARM_PARTS}, ), ( MINIMAL_CHARMCRAFT_YAML, SIMPLE_METADATA_YAML, None, None, - {"parts": BASIC_CHARM_PARTS}, + SIMPLE_CHARMCRAFT_DICT | {"parts": BASIC_CHARM_PARTS}, ), ( SIMPLE_CHARMCRAFT_YAML, None, SIMPLE_CONFIG_YAML, None, - {"config": SIMPLE_CONFIG_DICT, "parts": BASIC_CHARM_PARTS}, + SIMPLE_CHARMCRAFT_DICT + | {"config": SIMPLE_CONFIG_DICT, "parts": BASIC_CHARM_PARTS}, ), ( SIMPLE_CHARMCRAFT_YAML, None, None, SIMPLE_ACTIONS_YAML, - {"actions": SIMPLE_ACTIONS_DICT, "parts": BASIC_CHARM_PARTS}, + SIMPLE_CHARMCRAFT_DICT + | {"actions": SIMPLE_ACTIONS_DICT, "parts": BASIC_CHARM_PARTS}, ), ( MINIMAL_CHARMCRAFT_YAML, SIMPLE_METADATA_YAML, SIMPLE_CONFIG_YAML, SIMPLE_ACTIONS_YAML, - { + SIMPLE_CHARMCRAFT_DICT + | { "actions": SIMPLE_ACTIONS_DICT, "config": SIMPLE_CONFIG_DICT, "parts": BASIC_CHARM_PARTS, @@ -630,7 +725,8 @@ def test_unmarshal_invalid_type(type_): None, None, None, - { + SIMPLE_CHARMCRAFT_DICT + | { "parts": { "charm": { "plugin": "charm", @@ -647,6 +743,46 @@ def test_unmarshal_invalid_type(type_): }, id="implicit-parts-plugins", ), + pytest.param( + CHARMCRAFT_YAML_NON_VECTORISED_PLATFORMS, + None, + None, + None, + { + "name": "test-1874-regression", + "summary": "Regression test for", + "description": "A charm for regression testing https://github.com/canonical/charmcraft/issues/1874", + "base": "ubuntu@24.04", + "platforms": { + "amd64": {"build-on": ["amd64"], "build-for": ["amd64"]}, + "arm64": {"build-on": ["arm64"], "build-for": ["arm64"]}, + "ppc64el": {"build-on": ["ppc64el"], "build-for": ["ppc64el"]}, + "s390x": {"build-on": ["s390x"], "build-for": ["s390x"]}, + }, + "parts": { + "charm": { + "plugin": "dump", + "source": ".", + "prime": [ + "actions/*", + "files/*", + "hooks/*", + "lib/*", + "templates/*", + "config.yaml", + "copyright", + "icon.svg", + "LICENSE", + "Makefile", + "metadata.yaml", + "README.md", + ], + } + }, + "type": "charm", + }, + id="1874-regression", + ), ], ) def test_from_yaml_file_success( @@ -656,11 +792,8 @@ def test_from_yaml_file_success( metadata_yaml: str | None, config_yaml: str | None, actions_yaml: str | None, - expected_diff: dict[str, Any], + expected_dict: dict[str, Any], ): - expected_dict = simple_charm.marshal().copy() - expected_dict.update(expected_diff) - fs.create_file("/charmcraft.yaml", contents=charmcraft_yaml) if metadata_yaml: fs.create_file("/metadata.yaml", contents=metadata_yaml) @@ -742,6 +875,51 @@ def test_from_yaml_file_exception( assert exc.value.details == details +@pytest.mark.parametrize( + ("cls", "content"), + [ + ( + project.BasesCharm, + 
{ + "type": "charm", + "name": "blah", + "summary": "", + "description": "", + "bases": [{"name": "ubuntu", "channel": "22.04"}], + "charmhub": {"api_url": "http://charmhub.io"}, + }, + ), + ( + project.PlatformCharm, + { + "type": "charm", + "name": "blah", + "summary": "", + "description": "", + "base": "ubuntu@24.04", + "platforms": {"amd64": None}, + "charmhub": {"api_url": "http://charmhub.io"}, + }, + ), + ( + project.Bundle, + {"type": "bundle", "charmhub": {"api_url": "http://charmhub.io"}}, + ), + ], +) +def test_warn_on_deprecated_charmhub( + emitter: craft_cli.pytest_plugin.RecordingEmitter, cls, content +): + with pytest.warns(DeprecationWarning): + cls.model_validate(content) + emitter.assert_progress( + "WARNING: The 'charmhub' field is deprecated and no longer used. It will be removed in a " + f"future release. Use the ${const.STORE_API_ENV_VAR}, ${const.STORE_STORAGE_ENV_VAR} and " + f"${const.STORE_REGISTRY_ENV_VAR} environment variables instead.", + permanent=True, + ) + + # endregion # region Charm tests @pytest.mark.parametrize( @@ -754,7 +932,9 @@ def test_from_yaml_file_exception( ), ], ) -def test_instantiate_bases_charm_success(values: dict[str, Any], expected_changes: dict[str, Any]): +def test_instantiate_bases_charm_success( + values: dict[str, Any], expected_changes: dict[str, Any] +): """Various successful instantiations of a charm project.""" values.update( { @@ -767,7 +947,7 @@ def test_instantiate_bases_charm_success(values: dict[str, Any], expected_change expected = values.copy() expected.update(expected_changes) - actual = project.BasesCharm(**values) + actual = project.BasesCharm.model_validate(values) assert actual.marshal() == expected diff --git a/tests/unit/models/valid_charms_yaml/full-bases.yaml b/tests/unit/models/valid_charms_yaml/full-bases.yaml index 4e3663c3e..f27b68012 100644 --- a/tests/unit/models/valid_charms_yaml/full-bases.yaml +++ b/tests/unit/models/valid_charms_yaml/full-bases.yaml @@ -10,9 +10,6 @@ analysis: - framework linters: - entrypoint -charmhub: - api-url: https://api.staging.charmhub.io/ - storage-url: https://storage.staging.snapcraftcontent.com/ parts: im-not-calling-this-what-you-expect: plugin: charm diff --git a/tests/unit/models/valid_charms_yaml/full-platforms.yaml b/tests/unit/models/valid_charms_yaml/full-platforms.yaml index 03dca4686..a6dfb93c8 100644 --- a/tests/unit/models/valid_charms_yaml/full-platforms.yaml +++ b/tests/unit/models/valid_charms_yaml/full-platforms.yaml @@ -10,9 +10,6 @@ analysis: - framework linters: - entrypoint -charmhub: - api-url: https://api.staging.charmhub.io/ - storage-url: https://storage.staging.snapcraftcontent.com/ parts: im-not-calling-this-what-you-expect: plugin: charm diff --git a/tests/unit/parts/conftest.py b/tests/unit/parts/conftest.py index 352fd8b1f..71a33f8d8 100644 --- a/tests/unit/parts/conftest.py +++ b/tests/unit/parts/conftest.py @@ -13,8 +13,23 @@ # limitations under the License. 
# # For further info, check https://github.com/canonical/charmcraft +import pathlib import sys import pytest pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows not supported") + + +@pytest.fixture +def build_path(tmp_path: pathlib.Path) -> pathlib.Path: + path = tmp_path / "parts" / "foo" / "build" + path.mkdir(parents=True) + return path + + +@pytest.fixture +def install_path(tmp_path: pathlib.Path) -> pathlib.Path: + path = tmp_path / "parts" / "foo" / "install" + path.mkdir(parents=True) + return path diff --git a/tests/unit/parts/plugins/__init__.py b/tests/unit/parts/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/parts/test_bundle.py b/tests/unit/parts/plugins/test_bundle.py similarity index 100% rename from tests/unit/parts/test_bundle.py rename to tests/unit/parts/plugins/test_bundle.py diff --git a/tests/unit/parts/test_charm.py b/tests/unit/parts/plugins/test_charm.py similarity index 89% rename from tests/unit/parts/test_charm.py rename to tests/unit/parts/plugins/test_charm.py index 2feab3ab8..77c47e8f0 100644 --- a/tests/unit/parts/test_charm.py +++ b/tests/unit/parts/plugins/test_charm.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Unit tests for charm plugin.""" + import pathlib import sys from unittest.mock import patch @@ -90,7 +91,9 @@ def test_charmplugin_get_build_environment_ubuntu(charm_plugin, mocker): mock_version = mocker.patch("craft_parts.utils.os_utils.OsRelease.version_id") mock_id.return_value = "ubuntu" mock_version.return_value = "22.04" - assert charm_plugin.get_build_environment() == {"CRYPTOGRAPHY_OPENSSL_NO_LEGACY": "true"} + assert charm_plugin.get_build_environment() == { + "CRYPTOGRAPHY_OPENSSL_NO_LEGACY": "true" + } def test_charmplugin_get_build_environment_centos_7(charm_plugin, mocker, monkeypatch): @@ -105,7 +108,9 @@ def test_charmplugin_get_build_environment_centos_7(charm_plugin, mocker, monkey } -def test_charmplugin_get_build_commands_ubuntu(charm_plugin, tmp_path, mocker, monkeypatch): +def test_charmplugin_get_build_commands_ubuntu( + charm_plugin, tmp_path, mocker, monkeypatch +): monkeypatch.setenv("PATH", "/some/path") monkeypatch.setenv("SNAP", "snap_value") monkeypatch.setenv("SNAP_ARCH", "snap_arch_value") @@ -146,10 +151,14 @@ def test_charmplugin_get_build_commands_ubuntu(charm_plugin, tmp_path, mocker, m ] # check the callback is properly registered for running own method after build - mock_register.assert_called_with(charm_plugin.post_build_callback, step_list=[Step.BUILD]) + mock_register.assert_called_with( + charm_plugin.post_build_callback, step_list=[Step.BUILD] + ) -def test_charmplugin_get_build_commands_centos_7(charm_plugin, tmp_path, mocker, monkeypatch): +def test_charmplugin_get_build_commands_centos_7( + charm_plugin, tmp_path, mocker, monkeypatch +): monkeypatch.setenv("PATH", "/some/path") monkeypatch.setenv("SNAP", "snap_value") monkeypatch.setenv("SNAP_ARCH", "snap_arch_value") @@ -193,7 +202,9 @@ def test_charmplugin_get_build_commands_centos_7(charm_plugin, tmp_path, mocker, ] # check the callback is properly registered for running own method after build - mock_register.assert_called_with(charm_plugin.post_build_callback, step_list=[Step.BUILD]) + mock_register.assert_called_with( + charm_plugin.post_build_callback, step_list=[Step.BUILD] + ) def test_charmplugin_post_build_metric_collection(charm_plugin): @@ -205,7 +216,7 @@ def test_charmplugin_post_build_metric_collection(charm_plugin): def 
test_charmpluginproperties_invalid_properties(): content = {"source": ".", "charm-invalid": True} with pytest.raises(pydantic.ValidationError) as raised: - parts.CharmPlugin.properties_class.unmarshal(content) + parts.plugins.CharmPlugin.properties_class.unmarshal(content) err = raised.value.errors() assert len(err) == 1 @@ -216,14 +227,14 @@ def test_charmpluginproperties_invalid_properties(): def test_charmpluginproperties_entrypoint_ok(): """Simple valid entrypoint.""" content = {"source": ".", "charm-entrypoint": "myep.py"} - properties = parts.CharmPlugin.properties_class.unmarshal(content) + properties = parts.plugins.CharmPlugin.properties_class.unmarshal(content) assert properties.charm_entrypoint == "myep.py" def test_charmpluginproperties_entrypoint_default(): """Specific default if not configured.""" content = {"source": "."} - properties = parts.CharmPlugin.properties_class.unmarshal(content) + properties = parts.plugins.CharmPlugin.properties_class.unmarshal(content) assert properties.charm_entrypoint == "src/charm.py" @@ -231,7 +242,7 @@ def test_charmpluginproperties_entrypoint_relative(tmp_path): """The configuration is stored relative no matter what.""" absolute_path = tmp_path / "myep.py" content = {"source": str(tmp_path), "charm-entrypoint": str(absolute_path)} - properties = parts.CharmPlugin.properties_class.unmarshal(content) + properties = parts.plugins.CharmPlugin.properties_class.unmarshal(content) assert properties.charm_entrypoint == "myep.py" @@ -240,7 +251,7 @@ def test_charmpluginproperties_entrypoint_outside_project_absolute(tmp_path): outside_path = tmp_path.parent / "charm.py" content = {"source": str(tmp_path), "charm-entrypoint": str(outside_path)} with pytest.raises(pydantic.ValidationError) as raised: - parts.CharmPlugin.properties_class.unmarshal(content) + parts.plugins.CharmPlugin.properties_class.unmarshal(content) err = raised.value.errors() assert len(err) == 1 assert err[0]["loc"] == ("charm-entrypoint",) @@ -255,7 +266,7 @@ def test_charmpluginproperties_entrypoint_outside_project_relative(tmp_path): outside_path = tmp_path.parent / "charm.py" content = {"source": str(tmp_path), "charm-entrypoint": "../charm.py"} with pytest.raises(pydantic.ValidationError) as raised: - parts.CharmPlugin.properties_class.unmarshal(content) + parts.plugins.CharmPlugin.properties_class.unmarshal(content) err = raised.value.errors() assert len(err) == 1 assert err[0]["loc"] == ("charm-entrypoint",) @@ -268,7 +279,7 @@ def test_charmpluginproperties_entrypoint_outside_project_relative(tmp_path): def test_charmpluginproperties_requirements_default(tmp_path): """The configuration is empty by default.""" content = {"source": str(tmp_path)} - properties = parts.CharmPlugin.properties_class.unmarshal(content) + properties = parts.plugins.CharmPlugin.properties_class.unmarshal(content) assert properties.charm_requirements == [] @@ -276,7 +287,7 @@ def test_charmpluginproperties_requirements_filepresent_ok(tmp_path: pathlib.Pat """If a specific file is present in disk it's used.""" (tmp_path / "requirements.txt").write_text("somedep") content = {"source": str(tmp_path)} - properties = parts.CharmPluginProperties.unmarshal(content) + properties = parts.plugins.CharmPluginProperties.unmarshal(content) assert properties.charm_requirements == ["requirements.txt"] @@ -285,5 +296,5 @@ def test_charmpluginproperties_requirements_filepresent_but_configured(tmp_path) (tmp_path / "requirements.txt").write_text("somedep") (tmp_path / "alternative.txt").write_text("somedep") content 
= {"source": str(tmp_path), "charm-requirements": ["alternative.txt"]} - properties = parts.CharmPlugin.properties_class.unmarshal(content) + properties = parts.plugins.CharmPlugin.properties_class.unmarshal(content) assert properties.charm_requirements == ["alternative.txt"] diff --git a/tests/unit/parts/plugins/test_poetry.py b/tests/unit/parts/plugins/test_poetry.py new file mode 100644 index 000000000..d9ac2c820 --- /dev/null +++ b/tests/unit/parts/plugins/test_poetry.py @@ -0,0 +1,114 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft +"""Unit tests for the Charmcraft-specific poetry plugin.""" + +import pathlib +import sys + +import pytest +import pytest_check + +from charmcraft.parts import plugins + +pytestmark = [ + pytest.mark.skipif(sys.platform == "win32", reason="Windows not supported") +] + + +def test_get_build_environment( + poetry_plugin: plugins.PoetryPlugin, install_path: pathlib.Path +): + env = poetry_plugin.get_build_environment() + + assert env["PIP_NO_BINARY"] == ":all:" + + +def test_get_venv_directory( + poetry_plugin: plugins.PoetryPlugin, install_path: pathlib.Path +): + assert poetry_plugin._get_venv_directory() == install_path / "venv" + + +def test_get_pip_install_commands(poetry_plugin: plugins.PoetryPlugin): + poetry_plugin._get_pip = lambda: "/python -m pip" + + assert poetry_plugin._get_pip_install_commands( + pathlib.Path("/my dir/reqs.txt") + ) == [ + "/python -m pip install --no-deps '--requirement=/my dir/reqs.txt'", + "/python -m pip check", + ] + + +def test_get_package_install_commands( + poetry_plugin: plugins.PoetryPlugin, + build_path: pathlib.Path, + install_path: pathlib.Path, +): + copy_src_cmd = ( + f"cp --archive --recursive --reflink=auto {build_path}/src {install_path}" + ) + copy_lib_cmd = ( + f"cp --archive --recursive --reflink=auto {build_path}/lib {install_path}" + ) + + # Check if no src or libs exist + default_commands = poetry_plugin._get_package_install_commands() + + pytest_check.is_not_in(copy_src_cmd, default_commands) + pytest_check.is_not_in(copy_lib_cmd, default_commands) + + # With a src directory + (build_path / "src").mkdir(parents=True) + + pytest_check.equal( + poetry_plugin._get_package_install_commands(), [*default_commands, copy_src_cmd] + ) + + # With both src and lib + (build_path / "lib" / "charm").mkdir(parents=True) + + pytest_check.equal( + poetry_plugin._get_package_install_commands(), + [*default_commands, copy_src_cmd, copy_lib_cmd], + ) + + # With only lib + (build_path / "src").rmdir() + + pytest_check.equal( + poetry_plugin._get_package_install_commands(), [*default_commands, copy_lib_cmd] + ) + + +def test_get_rm_command( + poetry_plugin: plugins.PoetryPlugin, install_path: pathlib.Path +): + assert f"rm -rf {install_path / 'venv/bin'}" in poetry_plugin.get_build_commands() + + +def test_no_get_rm_command( + tmp_path, poetry_plugin: plugins.PoetryPlugin, install_path: pathlib.Path +): + spec = { + 
"plugin": "poetry", + "source": str(tmp_path), + "poetry-keep-bins": True, + } + poetry_plugin._options = plugins.PoetryPluginProperties.unmarshal(spec) + assert ( + f"rm -rf {install_path / 'venv/bin'}" not in poetry_plugin.get_build_commands() + ) diff --git a/tests/unit/parts/plugins/test_python.py b/tests/unit/parts/plugins/test_python.py new file mode 100644 index 000000000..5010e4ddf --- /dev/null +++ b/tests/unit/parts/plugins/test_python.py @@ -0,0 +1,123 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# For further info, check https://github.com/canonical/charmcraft +"""Unit tests for the Charmcraft-specific python plugin.""" + +import pathlib +import shlex +import sys + +import pytest +import pytest_check + +from charmcraft.parts import plugins + +pytestmark = [ + pytest.mark.skipif(sys.platform == "win32", reason="Windows not supported") +] + + +def test_get_build_environment( + python_plugin: plugins.PythonPlugin, install_path: pathlib.Path +): + env = python_plugin.get_build_environment() + + assert env["PIP_NO_BINARY"] == ":all:" + + +def test_get_venv_directory( + python_plugin: plugins.PythonPlugin, install_path: pathlib.Path +): + assert python_plugin._get_venv_directory() == install_path / "venv" + + +@pytest.mark.parametrize("constraints", [[], ["constraints.txt"]]) +@pytest.mark.parametrize("requirements", [[], ["requirements.txt"]]) +@pytest.mark.parametrize("packages", [[], ["distro==1.4.0"]]) +def test_get_package_install_commands( + tmp_path: pathlib.Path, + python_plugin: plugins.PythonPlugin, + build_path: pathlib.Path, + install_path: pathlib.Path, + constraints: list[str], + requirements: list[str], + packages: list[str], +): + spec = { + "plugin": "python", + "source": str(tmp_path), + "python-constraints": constraints, + "python-requirements": requirements, + "python-packages": packages, + } + python_plugin._options = plugins.PythonPluginProperties.unmarshal(spec) + python_plugin._get_pip = lambda: "/python -m pip" + copy_src_cmd = ( + f"cp --archive --recursive --reflink=auto {build_path}/src {install_path}" + ) + copy_lib_cmd = ( + f"cp --archive --recursive --reflink=auto {build_path}/lib {install_path}" + ) + + actual = python_plugin._get_package_install_commands() + + with pytest_check.check(): + assert actual[0].startswith("/python -m pip") + with pytest_check.check(): + assert actual[1].startswith("/python -m pip") + split_install_command = shlex.split(actual[0]) + for constraints_file in constraints: + pytest_check.is_in(f"--constraint={constraints_file}", split_install_command) + for requirements_file in requirements: + pytest_check.is_in(f"--requirement={requirements_file}", split_install_command) + for package in packages: + pytest_check.is_in(package, split_install_command) + pytest_check.is_not_in(copy_src_cmd, actual) + pytest_check.is_not_in(copy_lib_cmd, actual) + + (build_path / "src").mkdir() + + pytest_check.is_in(copy_src_cmd, python_plugin._get_package_install_commands()) + 
pytest_check.is_not_in(copy_lib_cmd, python_plugin._get_package_install_commands()) + + (build_path / "lib").mkdir() + + pytest_check.is_in(copy_src_cmd, python_plugin._get_package_install_commands()) + pytest_check.is_in(copy_lib_cmd, python_plugin._get_package_install_commands()) + + (build_path / "src").rmdir() + + pytest_check.is_not_in(copy_src_cmd, python_plugin._get_package_install_commands()) + pytest_check.is_in(copy_lib_cmd, python_plugin._get_package_install_commands()) + + +def test_get_rm_command( + python_plugin: plugins.PythonPlugin, install_path: pathlib.Path +): + assert f"rm -rf {install_path / 'venv/bin'}" in python_plugin.get_build_commands() + + +def test_no_get_rm_command( + tmp_path, python_plugin: plugins.PythonPlugin, install_path: pathlib.Path +): + spec = { + "plugin": "python", + "source": str(tmp_path), + "python-keep-bins": True, + } + python_plugin._options = plugins.PythonPluginProperties.unmarshal(spec) + assert ( + f"rm -rf {install_path / 'venv/bin'}" not in python_plugin.get_build_commands() + ) diff --git a/tests/unit/parts/test_reactive.py b/tests/unit/parts/plugins/test_reactive.py similarity index 94% rename from tests/unit/parts/test_reactive.py rename to tests/unit/parts/plugins/test_reactive.py index f9eda5cad..942883f0e 100644 --- a/tests/unit/parts/test_reactive.py +++ b/tests/unit/parts/plugins/test_reactive.py @@ -26,9 +26,11 @@ from craft_parts.errors import PluginEnvironmentValidationError from charmcraft import const -from charmcraft.parts import reactive +from charmcraft.parts.plugins import _reactive -pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported") +pytestmark = pytest.mark.skipif( + sys.platform == "win32", reason="Windows not [yet] supported" +) @pytest.fixture @@ -69,7 +71,7 @@ def spec(tmp_path): @pytest.fixture def plugin_properties(spec): - return reactive.ReactivePluginProperties.unmarshal(spec) + return _reactive.ReactivePluginProperties.unmarshal(spec) @pytest.fixture @@ -87,7 +89,9 @@ def plugin(tmp_path, plugin_properties, spec): ) part_info = craft_parts.PartInfo(project_info=project_info, part=part) - return plugins.get_plugin(part=part, part_info=part_info, properties=plugin_properties) + return plugins.get_plugin( + part=part, part_info=part_info, properties=plugin_properties + ) def test_get_build_package(plugin): @@ -104,7 +108,7 @@ def test_get_build_environment(plugin): def test_get_build_commands(plugin, tmp_path): assert plugin.get_build_commands() == [ - f"{sys.executable} -I {reactive.__file__} fake-project " + f"{sys.executable} -I {_reactive.__file__} fake-project " f"{tmp_path}/parts/foo/build {tmp_path}/parts/foo/install " "--charm-argument --charm-argument-with argument" ] @@ -178,7 +182,7 @@ def fake_run(): def test_build(build_dir, install_dir, fake_run): fake_run.return_value = CompletedProcess(("charm", "build"), 0) - returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, @@ -207,7 +211,7 @@ def test_build(build_dir, install_dir, fake_run): def test_build_charm_proof_raises_error_messages(build_dir, install_dir, fake_run): fake_run.side_effect = CalledProcessError(200, "E: name missing") - returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, @@ -226,7 +230,7 @@ def test_build_charm_proof_raises_warning_messages_does_not_raise( ): fake_run.side_effect = CalledProcessError(100, "W: Description is not pretty") 
- returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, @@ -266,7 +270,7 @@ def _run_generator(): fake_run.side_effect = _run_generator() - returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, @@ -292,7 +296,7 @@ def _run_generator(): ] # Also ensure negative return codes raises error - returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, @@ -315,7 +319,7 @@ def _run_generator(): fake_run.side_effect = _run_generator() - returncode = reactive.build( + returncode = _reactive.build( charm_name="test-charm", build_dir=build_dir, install_dir=install_dir, diff --git a/tests/unit/parts/test_lifecycle.py b/tests/unit/parts/test_lifecycle.py index 00037ec4e..7784ca038 100644 --- a/tests/unit/parts/test_lifecycle.py +++ b/tests/unit/parts/test_lifecycle.py @@ -178,7 +178,9 @@ def test_partslifecycle_run_actions_progress(tmp_path, monkeypatch, emitter): with patch("craft_parts.LifecycleManager.plan") as mock_plan: mock_plan.return_value = [action1, action2] - with patch("craft_parts.executor.executor.ExecutionContext.execute") as mock_exec: + with patch( + "craft_parts.executor.executor.ExecutionContext.execute" + ) as mock_exec: lc.run(Step.PRIME) emitter.assert_progress("Running step STAGE for part 'testpart'") diff --git a/tests/unit/services/test_analysis.py b/tests/unit/services/test_analysis.py index d3c3d9eb4..a2ff9520e 100644 --- a/tests/unit/services/test_analysis.py +++ b/tests/unit/services/test_analysis.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Unit tests for analysis service.""" + import pathlib import tempfile import zipfile @@ -57,12 +58,19 @@ def run(self, basedir: pathlib.Path) -> str: STUB_ATTRIBUTE_CHECKERS = [ StubAttributeChecker( - "unknown_attribute", "https://example.com/unknown", "returns unknown", LintResult.UNKNOWN + "unknown_attribute", + "https://example.com/unknown", + "returns unknown", + LintResult.UNKNOWN, + ), + StubAttributeChecker( + "says_python", "https://python.org", "returns python", "python" ), - StubAttributeChecker("says_python", "https://python.org", "returns python", "python"), ] STUB_CHECKER_RESULTS = [ - CheckResult(linter.name, linter.result, linter.url, CheckType.ATTRIBUTE, linter.text) + CheckResult( + linter.name, linter.result, linter.url, CheckType.ATTRIBUTE, linter.text + ) for linter in STUB_ATTRIBUTE_CHECKERS ] ATTRIBUTE_CHECKER_NAMES = frozenset(checker.name for checker in STUB_ATTRIBUTE_CHECKERS) @@ -86,7 +94,9 @@ def run(self, basedir: pathlib.Path) -> str: @pytest.fixture def mock_temp_dir(monkeypatch): mock_obj = mock.MagicMock(spec=tempfile.TemporaryDirectory) - monkeypatch.setattr(tempfile, "TemporaryDirectory", mock.Mock(return_value=mock_obj)) + monkeypatch.setattr( + tempfile, "TemporaryDirectory", mock.Mock(return_value=mock_obj) + ) return mock_obj @@ -104,7 +114,10 @@ def analysis_service(): @pytest.mark.parametrize( ("checkers", "expected"), - [(STUB_ATTRIBUTE_CHECKERS, STUB_CHECKER_RESULTS), (STUB_LINTERS, STUB_LINTER_RESULTS)], + [ + (STUB_ATTRIBUTE_CHECKERS, STUB_CHECKER_RESULTS), + (STUB_LINTERS, STUB_LINTER_RESULTS), + ], ) def test_lint_directory_results(monkeypatch, analysis_service, checkers, expected): monkeypatch.setattr(linters, "CHECKERS", checkers) @@ -114,22 +127,29 @@ def test_lint_directory_results(monkeypatch, analysis_service, 
checkers, expecte @pytest.mark.parametrize("checkers", [STUB_ATTRIBUTE_CHECKERS + STUB_LINTERS]) @pytest.mark.parametrize( - "ignore", [set(), {"success"}, ATTRIBUTE_CHECKER_NAMES, LINTER_NAMES, ALL_CHECKER_NAMES] + "ignore", + [set(), {"success"}, ATTRIBUTE_CHECKER_NAMES, LINTER_NAMES, ALL_CHECKER_NAMES], ) def test_lint_directory_ignores(monkeypatch, analysis_service, checkers, ignore): monkeypatch.setattr(linters, "CHECKERS", checkers) checker_names = {checker.name for checker in checkers} results = list( - analysis_service.lint_directory(pathlib.Path(), ignore=ignore, include_ignored=False) + analysis_service.lint_directory( + pathlib.Path(), ignore=ignore, include_ignored=False + ) ) checkers_run = {r.name for r in results} pytest_check.is_true(checkers_run.isdisjoint(ignore), f"{checkers_run & ignore}") - pytest_check.is_true(checkers_run.issubset(checker_names), str(checkers_run - checker_names)) + pytest_check.is_true( + checkers_run.issubset(checker_names), str(checkers_run - checker_names) + ) -def test_lint_file_results(fs, mock_temp_dir, mock_zip_file, monkeypatch, analysis_service): +def test_lint_file_results( + fs, mock_temp_dir, mock_zip_file, monkeypatch, analysis_service +): fake_charm = pathlib.Path("/fake/charm.charm") fs.create_file(fake_charm) mock_checker = mock.Mock() @@ -139,7 +159,9 @@ def test_lint_file_results(fs, mock_temp_dir, mock_zip_file, monkeypatch, analys results = list(analysis_service.lint_file(fake_charm)) with pytest_check.check: - mock_zip_file.__enter__.return_value.extractall.assert_called_once_with(fake_temp_path) + mock_zip_file.__enter__.return_value.extractall.assert_called_once_with( + fake_temp_path + ) with pytest_check.check: mock_checker.get_result.assert_called_once_with(fake_temp_path) pytest_check.equal(results, [mock_checker.get_result.return_value]) diff --git a/tests/unit/services/test_charmlibs.py b/tests/unit/services/test_charmlibs.py new file mode 100644 index 000000000..3fb4764ab --- /dev/null +++ b/tests/unit/services/test_charmlibs.py @@ -0,0 +1,160 @@ +# Copyright 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# For further info, check https://github.com/canonical/charmcraft +"""Unit tests for charmlibs service.""" + +import pathlib + +import pytest + +from charmcraft import services, utils +from charmcraft.store.models import Library + + +@pytest.fixture +def service(service_factory): + return service_factory.charm_libs + + +@pytest.fixture(params=["my-charm", "your-charm"]) +def charm_name(request) -> str: + return request.param + + +@pytest.fixture(params=["my_lib", "your_lib"]) +def lib_name(request) -> str: + return request.param + + +@pytest.fixture(params=[0, 1]) +def api(request) -> int: + return request.param + + +@pytest.fixture(params=[None, 0, 1]) +def patch(request) -> int | None: + return request.param + + +def test_is_downloaded_no_file( + fake_project_dir: pathlib.Path, + service: services.CharmLibsService, + charm_name: str, + lib_name: str, + api: int, + patch: int | None, +): + assert not service.is_downloaded( + charm_name=charm_name, lib_name=lib_name, api=api, patch=patch + ) + + +@pytest.mark.parametrize(("patch", "expected"), [(None, True), (1, True), (2, False)]) +def test_is_downloaded_with_file( + fake_project_dir: pathlib.Path, + service: services.CharmLibsService, + charm_name: str, + lib_name: str, + patch: int | None, + expected: bool, +): + lib_path = fake_project_dir / utils.get_lib_path(charm_name, lib_name, 0) + lib_path.parent.mkdir(parents=True) + lib_path.write_text("LIBID='abc'\nLIBAPI=0\nLIBPATCH=1\n") + + assert ( + service.is_downloaded( + charm_name=charm_name, lib_name=lib_name, api=0, patch=patch + ) + == expected + ) + + +@pytest.mark.parametrize( + ("charm_name", "lib_name", "lib_contents", "expected"), + [ + pytest.param( + "my-charm", + "my_lib", + "LIBID='abc'\nLIBAPI=0\nLIBPATCH=1\n", + (0, 1), + id="0.1", + ), + pytest.param( + "my-charm", + "my_lib", + "LIBID='abc'\nLIBAPI=16\nLIBPATCH=19\n", + (16, 19), + id="16.19", + ), + pytest.param( + "my-charm", + "my_lib", + "LIBID='abc'\nLIBAPI=0\nLIBPATCH=-1\n", + None, + id="patch_negative_1", + ), + pytest.param("my-charm", "my_lib", None, None, id="nonexistent"), + ], +) +def test_get_local_version( + fake_project_dir: pathlib.Path, + service: services.CharmLibsService, + charm_name: str, + lib_name: str, + lib_contents: str | None, + expected: tuple[int, int] | None, +): + if expected is not None: + lib_path = fake_project_dir / utils.get_lib_path( + charm_name, lib_name, expected[0] + ) + (fake_project_dir / lib_path).parent.mkdir(parents=True) + (fake_project_dir / lib_path).write_text(lib_contents) + + assert ( + service.get_local_version(charm_name=charm_name, lib_name=lib_name) == expected + ) + + +@pytest.mark.parametrize( + "lib", + [ + Library("lib_id", "lib_name", "charm_name", 0, 0, "some content", "hashy"), + ], +) +def test_write_success( + fake_project_dir: pathlib.Path, service: services.CharmLibsService, lib: Library +): + service.write(lib) + + actual = ( + fake_project_dir / utils.get_lib_path(lib.charm_name, lib.lib_name, lib.api) + ).read_text() + + assert actual == lib.content + + +@pytest.mark.parametrize( + "lib", + [ + Library("lib_id", "lib_name", "charm_name", 0, 0, None, "hashy"), + ], +) +def test_write_error( + fake_project_dir: pathlib.Path, service: services.CharmLibsService, lib: Library +): + with pytest.raises(ValueError, match="Library has no content"): + service.write(lib) diff --git a/tests/unit/services/test_image.py b/tests/unit/services/test_image.py index f6fdd0142..231635f82 100644 --- a/tests/unit/services/test_image.py +++ 
b/tests/unit/services/test_image.py
@@ -15,7 +15,6 @@
 # For further info, check https://github.com/canonical/charmcraft
 """Unit tests for the Image service."""
 
-
 import itertools
 import json
 from unittest import mock
@@ -41,15 +40,67 @@ def mock_skopeo(fake_process) -> mock.Mock:
 @pytest.fixture
 def image_service(service_factory, mock_skopeo, mock_docker) -> services.ImageService:
-    service = services.ImageService(app=application.APP_METADATA, services=service_factory)
+    service = services.ImageService(
+        app=application.APP_METADATA, services=service_factory
+    )
     service._skopeo = mock_skopeo
     service._docker = mock_docker
     return service
 
 
-def test_get_maybe_id_from_docker_success(image_service: services.ImageService, mock_docker):
+@pytest.mark.parametrize(
+    ("url", "name"),
+    [
+        (
+            "docker://hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346",
+            "hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346",
+        ),
+        (
+            "hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346",
+            "hello-world@sha256:18a657d0cc1c7d0678a3fbea8b7eb4918bba25968d3e1b0adebfa71caddbc346",
+        ),
+        (
+            "docker://ghcr.io/canonical/charmed-mysql@sha256:89b8305613f6ce94f78a7c9b4baedef78f2816fd6bc74c00f6607bc5e57bd8e6",
+            "ghcr.io/canonical/charmed-mysql@sha256:89b8305613f6ce94f78a7c9b4baedef78f2816fd6bc74c00f6607bc5e57bd8e6",
+        ),
+        (
+            "docker://quay.io/prometheus/blackbox-exporter:v0.24.0",
+            "quay.io/prometheus/blackbox-exporter:v0.24.0",
+        ),
+        (
+            "docker://quay.io/prometheus/blackbox-exporter:v0.24.0@sha256:3af31f8bd1ad2907b4b0f7c485fde3de0a8ee0b498d42fc971f0698885c03acb",
+            "quay.io/prometheus/blackbox-exporter:v0.24.0@sha256:3af31f8bd1ad2907b4b0f7c485fde3de0a8ee0b498d42fc971f0698885c03acb",
+        ),
+    ],
+)
+def test_get_name_from_url(url: str, name: str):
+    assert services.ImageService.get_name_from_url(url) == name
+
+
+@pytest.mark.parametrize(
+    ("go_arch", "charm_arch"),
+    [
+        *(
+            (key, const.CharmArch(value))
+            for key, value in const.GO_ARCH_TO_CHARM_ARCH.items()
+        ),
+        ("amd64", "amd64"),
+        ("arm64", "arm64"),
+        ("riscv64", "riscv64"),
+        ("s390x", "s390x"),
+    ],
+)
+def test_convert_go_arch_to_charm_arch(go_arch: str, charm_arch: const.CharmArch):
+    assert services.ImageService.convert_go_arch_to_charm_arch(go_arch) == charm_arch
+
+
+def test_get_maybe_id_from_docker_success(
+    image_service: services.ImageService, mock_docker
+):
     expected = "sha256:some-sha-hash"
-    mock_docker.images.get.return_value = docker.models.images.Image(attrs={"Id": expected})
+    mock_docker.images.get.return_value = docker.models.images.Image(
+        attrs={"Id": expected}
+    )
 
     result = image_service.get_maybe_id_from_docker("some-image")
 
@@ -57,19 +108,32 @@ def test_get_maybe_id_from_docker_success(image_service: services.ImageService,
     assert result == expected
 
 
-def test_get_maybe_id_from_docker_failure(image_service: services.ImageService, mock_docker):
+def test_get_maybe_id_from_docker_failure(
+    image_service: services.ImageService, mock_docker
+):
     mock_docker.images.get.side_effect = docker.errors.ImageNotFound("womp womp")
 
     assert image_service.get_maybe_id_from_docker("some-image") is None
 
 
+def test_get_maybe_id_from_docker_no_docker(image_service: services.ImageService):
+    image_service._docker = None
+
+    assert image_service.get_maybe_id_from_docker("some-image") is None
+
+
 @pytest.mark.parametrize("image", ["my-image"])
 @pytest.mark.parametrize("architecture", const.CharmArch)
 def test_inspect_single_arch(
-    fake_process, image_service: 
services.ImageService, mock_skopeo, image: str, architecture + fake_process, + image_service: services.ImageService, + mock_skopeo, + image: str, + architecture, ): fake_process.register( - ["/skopeo", "inspect", "--raw", image], stdout=json.dumps({"raw_manifest": True}) + ["/skopeo", "inspect", "--raw", image], + stdout=json.dumps({"raw_manifest": True}), ) fake_process.register( ["/skopeo", "inspect", image], @@ -86,7 +150,11 @@ def test_inspect_single_arch( @pytest.mark.parametrize("image", ["my-image"]) @pytest.mark.parametrize("architectures", itertools.product(const.CharmArch, repeat=2)) def test_inspect_two_arch( - fake_process, image_service: services.ImageService, mock_skopeo, image: str, architectures + fake_process, + image_service: services.ImageService, + mock_skopeo, + image: str, + architectures, ): fake_process.register( ["/skopeo", "inspect", "--raw", image], diff --git a/tests/unit/services/test_lifecycle.py b/tests/unit/services/test_lifecycle.py index cb0e0964e..a5ad0b0ac 100644 --- a/tests/unit/services/test_lifecycle.py +++ b/tests/unit/services/test_lifecycle.py @@ -42,7 +42,9 @@ def service(service_factory) -> LifecycleService: (f"foreign-{HOST_ARCH}", "foreign"), ], ) -def test_get_build_for_values(service: LifecycleService, plan_build_for: str, expected: str): +def test_get_build_for_values( + service: LifecycleService, plan_build_for: str, expected: str +): service._build_plan = [ models.BuildInfo( base=bases.BaseName("ubuntu", "22.04"), diff --git a/tests/unit/services/test_package.py b/tests/unit/services/test_package.py index 9ac506243..c11437b1f 100644 --- a/tests/unit/services/test_package.py +++ b/tests/unit/services/test_package.py @@ -32,7 +32,9 @@ from charmcraft.application.main import APP_METADATA from charmcraft.models.project import BasesCharm -SIMPLE_BUILD_BASE = models.charmcraft.Base(name="ubuntu", channel="22.04", architectures=["arm64"]) +SIMPLE_BUILD_BASE = models.charmcraft.Base( + name="ubuntu", channel="22.04", architectures=["arm64"] +) SIMPLE_MANIFEST = models.Manifest( charmcraft_started_at="1970-01-01T00:00:00+00:00", bases=[SIMPLE_BUILD_BASE], @@ -112,7 +114,9 @@ def test_get_charm_path(fake_path, package_service, bases, expected_name): ], ) def test_get_manifest(package_service, simple_charm, lint, expected): - simple_charm._started_at = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc) + simple_charm._started_at = datetime.datetime( + 1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc + ) assert package_service.get_manifest(lint) == expected @@ -181,10 +185,18 @@ def test_do_not_overwrite_actions_yaml( [ { "build-on": [ - {"name": "ubuntu", "channel": "22.04", "architectures": ["riscv64"]} + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["riscv64"], + } ], "run-on": [ - {"name": "ubuntu", "channel": "22.04", "architectures": ["all"]}, + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["all"], + }, ], }, ], @@ -206,20 +218,38 @@ def test_do_not_overwrite_actions_yaml( build_for=util.get_host_architecture(), base=BaseName("centos", "7"), ), - [{"name": "centos", "channel": "7", "architectures": [util.get_host_architecture()]}], + [ + { + "name": "centos", + "channel": "7", + "architectures": [util.get_host_architecture()], + } + ], ), pytest.param( [ {"name": "centos", "channel": "7"}, { "build-on": [{"name": "ubuntu", "channel": "20.04"}], - "run-on": [{"name": "ubuntu", "channel": "20.04", "architectures": ["all"]}], + "run-on": [ + {"name": "ubuntu", "channel": "20.04", 
"architectures": ["all"]} + ], }, { "build-on": [ - {"name": "ubuntu", "channel": "22.04", "architectures": ["amd64", "arm64"]} + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["amd64", "arm64"], + } + ], + "run-on": [ + { + "name": "ubuntu", + "channel": "22.04", + "architectures": ["arm64"], + } ], - "run-on": [{"name": "ubuntu", "channel": "22.04", "architectures": ["arm64"]}], }, ], BuildInfo( diff --git a/tests/unit/services/test_provider.py b/tests/unit/services/test_provider.py index b55f6c48d..d0cd16475 100644 --- a/tests/unit/services/test_provider.py +++ b/tests/unit/services/test_provider.py @@ -15,12 +15,23 @@ # For further info, check https://github.com/canonical/charmcraft """Unit tests for the provider service.""" +try: + import fcntl +except ModuleNotFoundError: # Windows + fcntl = None +import functools import pathlib +import sys +from collections.abc import Iterator +from unittest import mock import pytest +from craft_cli.pytest_plugin import RecordingEmitter from craft_providers import bases from charmcraft import models, services +from charmcraft.application.main import APP_METADATA +from charmcraft.services.provider import _maybe_lock_cache @pytest.fixture @@ -42,6 +53,17 @@ def provider_service( return service_factory.provider +@pytest.fixture +def mock_register(monkeypatch) -> Iterator[mock.Mock]: + register = mock.Mock() + monkeypatch.setattr("atexit.register", register) + yield register + + # Call the exit hooks as if exiting the application. + for hook in register.mock_calls: + functools.partial(*hook.args)() + + @pytest.mark.parametrize( "base_name", [ @@ -62,13 +84,16 @@ def provider_service( bases.BaseName("almalinux", "9"), ], ) +@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows") def test_get_base_forwards_cache( monkeypatch, provider_service: services.ProviderService, fake_path: pathlib.Path, base_name: bases.BaseName, ): - monkeypatch.setattr("charmcraft.env.get_host_shared_cache_path", lambda: fake_path / "cache") + monkeypatch.setattr( + "charmcraft.env.get_host_shared_cache_path", lambda: fake_path / "cache" + ) base = provider_service.get_base( base_name=base_name, @@ -76,3 +101,67 @@ def test_get_base_forwards_cache( ) assert base._cache_path == fake_path / "cache" + + +@pytest.mark.parametrize( + "base_name", + [ + bases.BaseName("ubuntu", "20.04"), + bases.BaseName("ubuntu", "22.04"), + bases.BaseName("ubuntu", "24.04"), + bases.BaseName("ubuntu", "devel"), + bases.BaseName("almalinux", "9"), + ], +) +@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows") +def test_get_base_no_cache_if_locked( + monkeypatch, + mock_register, + tmp_path: pathlib.Path, + base_name: bases.BaseName, + emitter: RecordingEmitter, +): + cache_path = tmp_path / "cache" + cache_path.mkdir(exist_ok=True, parents=True) + + # Make a new path object to work around caching the paths and thus getting the + # same file descriptor. + locked = _maybe_lock_cache(cache_path) + assert locked + new_cache_path = pathlib.Path(str(cache_path)) + monkeypatch.setattr( + "charmcraft.env.get_host_shared_cache_path", lambda: new_cache_path + ) + + # Can't use the fixture as pyfakefs doesn't handle locks. 
+    provider_service = services.ProviderService(
+        app=APP_METADATA,
+        services=None,  # pyright: ignore[reportArgumentType]
+        project=None,  # pyright: ignore[reportArgumentType]
+        work_dir=tmp_path,
+        build_plan=[],
+    )
+
+    base = provider_service.get_base(
+        base_name=base_name,
+        instance_name="charmcraft-test-instance",
+    )
+
+    assert base._cache_path is None
+    emitter.assert_progress(
+        "Shared cache locked by another process; running without cache.",
+        permanent=True,
+    )
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows")
+def test_maybe_lock_cache_locks_single_lock(tmp_path: pathlib.Path) -> None:
+    assert _maybe_lock_cache(tmp_path)
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="no cache on windows")
+def test_maybe_lock_cache_with_another_lock(tmp_path: pathlib.Path) -> None:
+    # Need to save the open file so it's not closed when we try a second time.
+    first_file_descriptor = _maybe_lock_cache(tmp_path)
+    assert first_file_descriptor
+    assert _maybe_lock_cache(tmp_path) is None
diff --git a/tests/unit/services/test_store.py b/tests/unit/services/test_store.py
index 8407d6ce4..55dadff43 100644
--- a/tests/unit/services/test_store.py
+++ b/tests/unit/services/test_store.py
@@ -14,6 +14,7 @@
#
# For further info, check https://github.com/canonical/charmcraft
"""Tests for the store service."""
+
import platform
from typing import cast
from unittest import mock
@@ -34,7 +35,9 @@

@pytest.fixture
def store(service_factory) -> services.StoreService:
-    store = services.StoreService(app=application.APP_METADATA, services=service_factory)
+    store = services.StoreService(
+        app=application.APP_METADATA, services=service_factory
+    )
    store.client = mock.Mock(spec_set=client.Client)
    store.anonymous_client = mock.Mock(spec_set=client.AnonymousClient)
    return store
@@ -48,7 +51,10 @@ def reusable_store():


def test_user_agent(store):
-    assert store._user_agent == f"Charmcraft/{charmcraft.__version__} ({store._ua_system_info})"
+    assert (
+        store._user_agent
+        == f"Charmcraft/{charmcraft.__version__} ({store._ua_system_info})"
+    )


@pytest.mark.parametrize("system", ["Windows", "Macos"])
@@ -65,7 +71,10 @@ def test_ua_system_info_non_linux(
    monkeypatch.setattr(platform, "python_implementation", lambda: python)
    monkeypatch.setattr(platform, "python_version", lambda: python_version)

-    assert store._ua_system_info == f"{system} {release}; {machine}; {python} {python_version}"
+    assert (
+        store._ua_system_info
+        == f"{system} {release}; {machine}; {python} {python_version}"
+    )


@pytest.mark.parametrize("machine", ["x86_64", "arm64", "riscv64"])
@@ -91,7 +100,9 @@ def test_ua_system_info_linux(


def test_setup_with_error(emitter: RecordingEmitter, store):
-    store.ClientClass = mock.Mock(side_effect=[craft_store.errors.NoKeyringError, "I am a store!"])
+    store.ClientClass = mock.Mock(
+        side_effect=[craft_store.errors.NoKeyringError, "I am a store!"]
+    )

    store.setup()
@@ -129,15 +140,23 @@ def test_login(reusable_store, permissions, description, ttl, channels):
    )

    client.login.assert_called_once_with(
-        permissions=permissions, description=description, ttl=ttl, packages=None, channels=channels
+        permissions=permissions,
+        description=description,
+        ttl=ttl,
+        packages=None,
+        channels=channels,
    )


def test_login_failure(store):
    client = cast(mock.Mock, store.client)
-    client.login.side_effect = craft_store.errors.CredentialsAlreadyAvailable("charmcraft", "host")
+    client.login.side_effect = craft_store.errors.CredentialsAlreadyAvailable(
+        "charmcraft", "host"
+    )

-    with pytest.raises(errors.CraftError, match="Cannot login because credentials were found"):
+    with pytest.raises(
+        errors.CraftError, match="Cannot login because credentials were found"
+    ):
        store.login()
@@ -158,7 +177,11 @@ def test_logout(store):
        [
            models.CharmResourceRevisionUpdateRequest(
                revision=123,
-                bases=[models.RequestCharmResourceBase(architectures=["amd64", "riscv64"])],
+                bases=[
+                    models.RequestCharmResourceBase(
+                        architectures=["amd64", "riscv64"]
+                    )
+                ],
            )
        ],
    ),
@@ -170,7 +193,11 @@ def test_logout(store):
        [
            models.CharmResourceRevisionUpdateRequest(
                revision=123,
-                bases=[models.RequestCharmResourceBase(architectures=["amd64", "riscv64"])],
+                bases=[
+                    models.RequestCharmResourceBase(
+                        architectures=["amd64", "riscv64"]
+                    )
+                ],
            ),
            models.CharmResourceRevisionUpdateRequest(
                revision=456,
@@ -180,7 +207,9 @@
            ),
        ],
    )
-def test_set_resource_revisions_architectures_request_form(store, updates, expected_request):
+def test_set_resource_revisions_architectures_request_form(
+    store, updates, expected_request
+):
    store.client.list_resource_revisions.return_value = []

    store.set_resource_revisions_architectures("my-charm", "my-file", updates)
@@ -199,10 +228,18 @@ def test_set_resource_revisions_architectures_request_form(store, updates, expec
        (
            {123: ["all"]},
            [
-                get_fake_revision(bases=[models.ResponseCharmResourceBase()], revision=0),
-                get_fake_revision(bases=[models.ResponseCharmResourceBase()], revision=123),
+                get_fake_revision(
+                    bases=[models.ResponseCharmResourceBase()], revision=0
+                ),
+                get_fake_revision(
+                    bases=[models.ResponseCharmResourceBase()], revision=123
+                ),
+            ],
+            [
+                get_fake_revision(
+                    bases=[models.ResponseCharmResourceBase()], revision=123
+                )
            ],
-            [get_fake_revision(bases=[models.ResponseCharmResourceBase()], revision=123)],
        ),
    ],
)
@@ -251,12 +288,20 @@ def test_get_credentials(monkeypatch, store):
        ),
        (
            [CharmLib(lib="my_charm.my_lib", version="1.0")],
-            [{"charm-name": "my-charm", "library-name": "my_lib", "api": 1, "patch": 0}],
+            [
+                {
+                    "charm-name": "my-charm",
+                    "library-name": "my_lib",
+                    "api": 1,
+                    "patch": 0,
+                }
+            ],
        ),
    ],
)
def test_fetch_libraries_metadata(monkeypatch, store, libs, expected_call):
    store.get_libraries_metadata(libs)

-    store.anonymous_client.fetch_libraries_metadata.assert_called_once_with(expected_call)
+    store.anonymous_client.fetch_libraries_metadata.assert_called_once_with(
+        expected_call
+    )
diff --git a/tests/unit/store/test_client.py b/tests/unit/store/test_client.py
index e739272e7..6177f3012 100644
--- a/tests/unit/store/test_client.py
+++ b/tests/unit/store/test_client.py
@@ -29,7 +29,9 @@ def client() -> store.Client:

@pytest.fixture
def anonymous_client() -> store.AnonymousClient:
-    return store.AnonymousClient("http://charmhub.local", "http://storage.charmhub.local")
+    return store.AnonymousClient(
+        "http://charmhub.local", "http://storage.charmhub.local"
+    )


@pytest.mark.parametrize(
@@ -48,7 +50,9 @@ def anonymous_client() -> store.AnonymousClient:
            0,
            0,
            mock.call(
-                "GET", "/v1/charm/libraries/my-charm/abcdefg", params={"api": 0, "patch": 0}
+                "GET",
+                "/v1/charm/libraries/my-charm/abcdefg",
+                params={"api": 0, "patch": 0},
            ),
        ),
    ],
@@ -68,7 +72,9 @@ def test_get_library_success(
    )
    monkeypatch.setattr(anonymous_client, "request_urlpath_json", mock_get_urlpath_json)

-    anonymous_client.get_library(charm_name=charm, library_id=lib_id, api=api, patch=patch)
+    anonymous_client.get_library(
+        charm_name=charm, library_id=lib_id, api=api, patch=patch
+    )
    mock_get_urlpath_json.assert_has_calls([expected_call])
@@ -105,7 +111,9 @@ def test_get_library_success(
        ),
    ],
)
-def test_fetch_libraries_metadata(monkeypatch, anonymous_client, libs, json_response, expected):
+def test_fetch_libraries_metadata(
+    monkeypatch, anonymous_client, libs, json_response, expected
+):
    mock_get_urlpath_json = mock.Mock(return_value=json_response)
    monkeypatch.setattr(anonymous_client, "request_urlpath_json", mock_get_urlpath_json)
diff --git a/tests/unit/test_application.py b/tests/unit/test_application.py
index c8cd28654..7a7405014 100644
--- a/tests/unit/test_application.py
+++ b/tests/unit/test_application.py
@@ -14,6 +14,7 @@
#
# For further info, check https://github.com/canonical/charmcraft
"""Unit tests for application class."""
+
import textwrap
from unittest import mock
@@ -70,7 +71,13 @@
)
@pytest.mark.parametrize(
    "expected",
-    [{"name": "test-charm", "summary": "A test charm", "description": "A charm for testing!"}],
+    [
+        {
+            "name": "test-charm",
+            "summary": "A test charm",
+            "description": "A charm for testing!",
+        }
+    ],
)
def test_extra_yaml_transform_success(
    fs: pyfakefs.fake_filesystem.FakeFilesystem,
@@ -83,7 +90,9 @@ def test_extra_yaml_transform_success(
    fs.create_file("metadata.yaml", contents=metadata_yaml)
    app = application.Charmcraft(app=application.APP_METADATA, services=service_factory)

-    actual = app._extra_yaml_transform(charmcraft_dict, build_on="amd64", build_for=None)
+    actual = app._extra_yaml_transform(
+        charmcraft_dict, build_on="amd64", build_for=None
+    )

    assert actual == expected
@@ -224,7 +233,9 @@ def test_deprecated_prime_warning(
            },
            id="named-reactive",
        ),
-        pytest.param({"parts": {"my-part": {"plugin": "reactive"}}}, id="reactive-plugin"),
+        pytest.param(
+            {"parts": {"my-part": {"plugin": "reactive"}}}, id="reactive-plugin"
+        ),
        pytest.param(
            {
                "parts": {"bundle": {}},
@@ -285,7 +296,9 @@ def test_expand_environment_multi_arch(
) -> None:
    mock_parent_expand_environment = mock.Mock()
    monkeypatch.setattr(
-        craft_application.Application, "_expand_environment", mock_parent_expand_environment
+        craft_application.Application,
+        "_expand_environment",
+        mock_parent_expand_environment,
    )

    app = application.Charmcraft(app=application.APP_METADATA, services=service_factory)
diff --git a/tests/unit/test_charm_builder.py b/tests/unit/test_charm_builder.py
index 24fe5d300..553b06fd3 100644
--- a/tests/unit/test_charm_builder.py
+++ b/tests/unit/test_charm_builder.py
@@ -14,6 +14,7 @@
#
# For further info, check https://github.com/canonical/charmcraft
"""Unit tests for CharmBuilder."""
+
import pathlib

import pytest
@@ -77,7 +78,9 @@ def test_install_strict_dependencies_pip_failure(
    fs, fake_process: FakeProcess, builder, requirements
):
    fs.create_file("requirements.txt", contents=requirements)
-    no_binary_packages = utils.get_package_names(requirements.splitlines(keepends=False))
+    no_binary_packages = utils.get_package_names(
+        requirements.splitlines(keepends=False)
+    )
    no_binary_packages_str = ",".join(sorted(no_binary_packages))
    fake_process.register(
        [
diff --git a/tests/unit/test_dispatch.py b/tests/unit/test_dispatch.py
new file mode 100644
index 000000000..dccfb92ad
--- /dev/null
+++ b/tests/unit/test_dispatch.py
@@ -0,0 +1,74 @@
+# Copyright 2024 Canonical Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# For further info, check https://github.com/canonical/charmcraft
+"""Unit tests for dispatch script creation."""
+
+import pathlib
+
+import pytest
+import pytest_check
+
+from charmcraft import const, dispatch
+
+
+def test_create_dispatch_hooks_exist(fake_path: pathlib.Path):
+    """Test that nothing happens if a hooks directory exists."""
+    prime_dir = fake_path / "prime"
+    (prime_dir / const.HOOKS_DIRNAME).mkdir(parents=True)
+
+    pytest_check.is_false(dispatch.create_dispatch(prime_dir=prime_dir))
+
+    pytest_check.is_false((prime_dir / const.DISPATCH_FILENAME).exists())
+
+
+def test_create_dispatch_dispatch_exists(fake_path: pathlib.Path):
+    """Test that nothing happens if dispatch file already exists."""
+    prime_dir = fake_path / "prime"
+    prime_dir.mkdir()
+    dispatch_path = prime_dir / const.DISPATCH_FILENAME
+    dispatch_path.write_text("DO NOT OVERWRITE")
+
+    pytest_check.is_false(dispatch.create_dispatch(prime_dir=prime_dir))
+
+    pytest_check.equal(dispatch_path.read_text(), "DO NOT OVERWRITE")
+
+
+@pytest.mark.parametrize("entrypoint", ["src/charm.py", "src/some_entrypoint.py"])
+def test_create_dispatch_no_entrypoint(fake_path: pathlib.Path, entrypoint):
+    prime_dir = fake_path / "prime"
+    prime_dir.mkdir()
+    dispatch_path = prime_dir / const.DISPATCH_FILENAME
+
+    pytest_check.is_false(
+        dispatch.create_dispatch(prime_dir=prime_dir, entrypoint=entrypoint)
+    )
+
+    pytest_check.is_false(dispatch_path.exists())
+
+
+@pytest.mark.parametrize("entrypoint", ["src/charm.py", "src/some_entrypoint.py"])
+def test_create_dispatch_with_entrypoint(fake_path: pathlib.Path, entrypoint):
+    prime_dir = fake_path / "prime"
+    prime_dir.mkdir()
+    entrypoint = prime_dir / entrypoint
+    entrypoint.parent.mkdir(parents=True, exist_ok=True)
+    entrypoint.touch()
+    dispatch_file = prime_dir / const.DISPATCH_FILENAME
+    expected = dispatch.DISPATCH_SCRIPT_TEMPLATE.format(entrypoint=entrypoint)
+
+    pytest_check.is_true(
+        dispatch.create_dispatch(prime_dir=prime_dir, entrypoint=entrypoint)
+    )
+    pytest_check.equal(dispatch_file.read_text(), expected)
diff --git a/tests/unit/test_linters.py b/tests/unit/test_linters.py
new file mode 100644
index 000000000..ca1e72073
--- /dev/null
+++ b/tests/unit/test_linters.py
@@ -0,0 +1,138 @@
+# Copyright 2024 Canonical Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# For further info, check https://github.com/canonical/charmcraft
+"""Unit tests for linters."""
+
+import pathlib
+import subprocess
+import sys
+
+import pytest
+
+from charmcraft import linters
+from charmcraft.models.lint import LintResult
+
+
+@pytest.fixture
+def valid_venv_path(fake_path) -> pathlib.Path:
+    """Create and return a fakefs path that contains a valid venv structure"""
+    (fake_path / "venv" / "lib").mkdir(parents=True)
+    return fake_path
+
+
+def test_pip_check_not_venv(fake_path: pathlib.Path):
+    lint = linters.PipCheck()
+    assert lint.run(fake_path) == LintResult.NONAPPLICABLE
+    assert lint.text == "Charm does not contain a Python venv."
+
+
+def test_pip_invalid_venv(fake_path: pathlib.Path):
+    (fake_path / "venv").mkdir()
+    lint = linters.PipCheck()
+    assert lint.run(fake_path) == LintResult.NONAPPLICABLE
+    assert lint.text == "Python venv is not valid."
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.")
+def test_pip_check_success(valid_venv_path: pathlib.Path, fp):
+    fp.register(
+        [sys.executable, "-m", "pip", "--python", fp.any(), "check"],
+        returncode=0,
+        stdout="Loo loo loo, doing pip stuff. Pip stuff is my favourite stuff.",
+    )
+
+    lint = linters.PipCheck()
+    assert lint.run(valid_venv_path) == LintResult.OK
+    assert lint.text == linters.PipCheck.text
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.")
+def test_pip_check_warning(valid_venv_path: pathlib.Path, fp):
+    fp.register(
+        [sys.executable, "-m", "pip", "--python", fp.any(), "check"],
+        returncode=1,
+        stdout="This error was sponsored by Raytheon Knife Missiles™",
+    )
+
+    lint = linters.PipCheck()
+    assert lint.run(valid_venv_path) == LintResult.WARNING
+    assert lint.text == "This error was sponsored by Raytheon Knife Missiles™"
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.")
+def test_pip_check_exception(valid_venv_path: pathlib.Path, monkeypatch):
+    def _raises_eperm(*args, **kwargs) -> None:
+        raise PermissionError(13, "Permission denied")
+
+    monkeypatch.setattr(subprocess, "run", _raises_eperm)
+
+    lint = linters.PipCheck()
+    assert lint.run(valid_venv_path) == LintResult.NONAPPLICABLE
+    assert (
+        lint.text
+        == f"Permission denied: Could not run Python executable at {sys.executable}."
+    )
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.")
+def test_pip_check_repair_no_bin(valid_venv_path: pathlib.Path, fp):
+    """Check that the bin directory is deleted if it was missing before"""
+    fp.register(
+        [sys.executable, "-m", "pip", "--python", fp.any(), "check"],
+        returncode=0,
+        stdout="Gosh, I sure hope I remember where everything went.",
+    )
+    lint = linters.PipCheck()
+
+    # Make sure it doesn't leave behind "bin" if it didn't exist
+    assert lint.run(valid_venv_path) == LintResult.OK
+    assert lint.text == "Virtual environment is valid."
+ assert not (valid_venv_path / "venv" / "bin").exists() + + +@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.") +def test_pip_check_repair_no_py(valid_venv_path: pathlib.Path, fp): + """Check that the python symlink is deleted if it was missing before""" + fp.register( + [sys.executable, "-m", "pip", "--python", fp.any(), "check"], + returncode=0, + stdout="Gosh, I sure hope I remember where everything went.", + ) + lint = linters.PipCheck() + + # Make sure it keeps "bin" if only the Python binary didn't exist + (valid_venv_path / "venv" / "bin").mkdir() + assert lint.run(valid_venv_path) == LintResult.OK + assert lint.text == "Virtual environment is valid." + assert (valid_venv_path / "venv" / "bin").exists() + assert not (valid_venv_path / "venv" / "bin" / "python").exists() + + +@pytest.mark.skipif(sys.platform == "win32", reason="Windows not [yet] supported.") +def test_pip_check_repair_all(valid_venv_path: pathlib.Path, fp): + """Check that nothing is changed if all components are present""" + fp.register( + [sys.executable, "-m", "pip", "--python", fp.any(), "check"], + returncode=0, + stdout="Gosh, I sure hope I remember where everything went.", + ) + lint = linters.PipCheck() + + (valid_venv_path / "venv" / "bin").mkdir() + (valid_venv_path / "venv" / "bin" / "python").touch() + + assert lint.run(valid_venv_path) == LintResult.OK + assert lint.text == "Virtual environment is valid." + assert (valid_venv_path / "venv" / "bin" / "python").is_file() diff --git a/tests/unit/test_parts.py b/tests/unit/test_parts.py index f3195c240..3292b8bce 100644 --- a/tests/unit/test_parts.py +++ b/tests/unit/test_parts.py @@ -35,12 +35,20 @@ {"charm-requirements": ["requirements.txt"]}, ), ( - {"charm-requirements": ["requirements.txt"], "charm-binary-python-packages": ["ops"]}, - {"charm-requirements": ["requirements.txt"], "charm-binary-python-packages": ["ops"]}, + { + "charm-requirements": ["requirements.txt"], + "charm-binary-python-packages": ["ops"], + }, + { + "charm-requirements": ["requirements.txt"], + "charm-binary-python-packages": ["ops"], + }, ), ], ) -def test_partconfig_strict_dependencies_success(fs: FakeFilesystem, part_config, expected): +def test_partconfig_strict_dependencies_success( + fs: FakeFilesystem, part_config, expected +): """Test various success scenarios for a charm part with strict dependencies.""" for file in part_config.get("charm-requirements", ["requirements.txt"]): fs.create_file(file, contents="ops~=2.5") @@ -61,10 +69,15 @@ def test_partconfig_strict_dependencies_success(fs: FakeFilesystem, part_config, {"charm-requirements": ["req.txt"], "charm-python-packages": ["ops"]}, "Value error, 'charm-python-packages' must not be set if 'charm-strict-dependencies' is enabled", ), - ({}, "Value error, 'charm-strict-dependencies' requires at least one requirements file."), + ( + {}, + "Value error, 'charm-strict-dependencies' requires at least one requirements file.", + ), ], ) -def test_partconfig_strict_dependencies_failure(fs: FakeFilesystem, part_config, message): +def test_partconfig_strict_dependencies_failure( + fs: FakeFilesystem, part_config, message +): """Test failure scenarios for a charm part with strict dependencies.""" for file in part_config.get("charm-requirements", []): fs.create_file(file, contents="ops==2.5.1\n") diff --git a/tests/unit/test_preprocess.py b/tests/unit/test_preprocess.py index 137d1f82a..e8f65360d 100644 --- a/tests/unit/test_preprocess.py +++ b/tests/unit/test_preprocess.py @@ -14,6 +14,7 @@ # # For 
further info, check https://github.com/canonical/charmcraft """Tests for project pre-processing functions.""" + import pathlib import textwrap @@ -21,7 +22,10 @@ from charmcraft import const, errors, preprocess -BASIC_BUNDLE = {"type": "bundle", "parts": {"bundle": {"plugin": "bundle", "source": "."}}} +BASIC_BUNDLE = { + "type": "bundle", + "parts": {"bundle": {"plugin": "bundle", "source": "."}}, +} BASIC_CHARM = {"type": "charm", "parts": {"charm": {"plugin": "charm", "source": "."}}} BASIC_BASES_CHARM = {**BASIC_CHARM, "bases": [{"name": "ubuntu", "channel": "22.04"}]} @@ -34,7 +38,11 @@ pytest.param(BASIC_BUNDLE.copy(), BASIC_BUNDLE, id="prefilled-bundle"), pytest.param( {"type": "charm", "bases": []}, - {"type": "charm", "bases": [], "parts": {"charm": {"plugin": "charm", "source": "."}}}, + { + "type": "charm", + "bases": [], + "parts": {"charm": {"plugin": "charm", "source": "."}}, + }, id="empty-charm", ), pytest.param(BASIC_CHARM.copy(), BASIC_CHARM, id="basic-charm"), @@ -50,7 +58,9 @@ def test_add_default_parts_correct(yaml_data, expected): ("yaml_data", "metadata_yaml", "expected"), [ pytest.param({}, None, {}, id="nonexistent"), - pytest.param({}, "{}", {"name": None, "summary": None, "description": None}, id="empty"), + pytest.param( + {}, "{}", {"name": None, "summary": None, "description": None}, id="empty" + ), pytest.param( {"name": "my-charm"}, "summary: a charm", @@ -116,7 +126,10 @@ def test_extra_yaml_transform_failure(fs, yaml_data, metadata_yaml, message): [ pytest.param({}, "", {}, id="non-bundle"), pytest.param( - {"type": "bundle"}, "{}", {"type": "bundle", "bundle": {}}, id="empty-bundle" + {"type": "bundle"}, + "{}", + {"type": "bundle", "bundle": {}}, + id="empty-bundle", ), ], ) @@ -156,7 +169,11 @@ def test_add_bundle_snippet_invalid_file(fs, contents): ("yaml_data", "config_yaml", "expected"), [ ({}, "{}", {"config": {}}), - ({}, "options:\n boop:\n type: int", {"config": {"options": {"boop": {"type": "int"}}}}), + ( + {}, + "options:\n boop:\n type: int", + {"config": {"options": {"boop": {"type": "int"}}}}, + ), ], ) def test_add_config_success(fs, yaml_data, config_yaml, expected): diff --git a/tests/unit/utils/test_charmlibs.py b/tests/unit/utils/test_charmlibs.py index 34a678df2..2ed271202 100644 --- a/tests/unit/utils/test_charmlibs.py +++ b/tests/unit/utils/test_charmlibs.py @@ -15,6 +15,7 @@ # For further info, check https://github.com/canonical/charmcraft """Tests for store helpers commands (code in store/charmlibs.py).""" + import hashlib import pathlib import sys @@ -24,60 +25,116 @@ from charmcraft import const from charmcraft.utils.charmlibs import ( + QualifiedLibraryName, collect_charmlib_pydeps, + get_lib_charm_path, get_lib_info, get_lib_internals, get_lib_module_name, get_lib_path, get_libs_from_tree, - get_name_from_metadata, + get_name_from_yaml, +) + + +@pytest.mark.parametrize( + ("value", "expected"), + [("my-charm.my_lib", QualifiedLibraryName("my_charm", "my_lib"))], +) +def test_qualified_library_name_from_string_success( + value: str, expected: QualifiedLibraryName +) -> None: + assert QualifiedLibraryName.from_string(value) == expected + + +@pytest.mark.parametrize( + ("value", "expected"), + [(QualifiedLibraryName("my_charm", "my_lib"), "my-charm.my_lib")], ) +def test_qualified_library_name_to_string_success( + value: str, expected: QualifiedLibraryName +) -> None: + assert str(value) == expected + + +@pytest.mark.parametrize("value", ["", "charm-name", "charm-name.", ".", ".lib_name"]) +def 
test_qualified_library_name_from_string_error(value: str): + with pytest.raises(ValueError, match="Not a valid library name: "): + QualifiedLibraryName.from_string(value) # region Name-related tests -def test_get_name_from_metadata_ok(tmp_path, monkeypatch): +@pytest.mark.parametrize( + ("file_name"), + [const.METADATA_FILENAME, const.CHARMCRAFT_FILENAME], +) +def test_get_name_from_yaml_ok(tmp_path, monkeypatch, file_name): """The metadata file is valid yaml, but there is no name.""" monkeypatch.chdir(tmp_path) - # put a valid metadata + # put a valid yaml + yaml_file = tmp_path / file_name + with yaml_file.open("wb") as fh: + fh.write(b"name: test-name") + + result = get_name_from_yaml() + assert result == "test-name" + + +def test_get_name_from_yaml_both_exist_metadata_has_name(tmp_path, monkeypatch): + """The metadata file is valid yaml, but there is no name.""" + monkeypatch.chdir(tmp_path) + + # put a valid yaml, but name is in metadata.yaml + charmcraft_file = tmp_path / const.CHARMCRAFT_FILENAME + with charmcraft_file.open("wb") as fh: + fh.write(b"notname: test-name") metadata_file = tmp_path / const.METADATA_FILENAME with metadata_file.open("wb") as fh: fh.write(b"name: test-name") - result = get_name_from_metadata() + result = get_name_from_yaml() assert result == "test-name" -def test_get_name_from_metadata_no_file(tmp_path, monkeypatch): +def test_get_name_from_yaml_no_file(tmp_path, monkeypatch): """No metadata file to get info.""" monkeypatch.chdir(tmp_path) - result = get_name_from_metadata() + result = get_name_from_yaml() assert result is None -def test_get_name_from_metadata_bad_content_garbage(tmp_path, monkeypatch): +@pytest.mark.parametrize( + ("file_name"), + [const.METADATA_FILENAME, const.CHARMCRAFT_FILENAME], +) +def test_get_name_from_yaml_bad_content_garbage(tmp_path, monkeypatch, file_name): """The metadata file is broken.""" monkeypatch.chdir(tmp_path) - # put a broken metadata - metadata_file = tmp_path / const.METADATA_FILENAME + # put a broken yaml + metadata_file = tmp_path / file_name with metadata_file.open("wb") as fh: fh.write(b"\b00\bff -- not a really yaml stuff") - result = get_name_from_metadata() + result = get_name_from_yaml() assert result is None -def test_get_name_from_metadata_bad_content_no_name(tmp_path, monkeypatch): +@pytest.mark.parametrize( + ("file_name"), + [const.METADATA_FILENAME, const.CHARMCRAFT_FILENAME], +) +def test_get_name_from_yaml_bad_content_no_name(tmp_path, monkeypatch, file_name): """The metadata file is valid yaml, but there is no name.""" monkeypatch.chdir(tmp_path) - # put a broken metadata - metadata_file = tmp_path / const.METADATA_FILENAME + # put a broken yaml + metadata_file = tmp_path / file_name with metadata_file.open("wb") as fh: fh.write(b"{}") - result = get_name_from_metadata() + result = get_name_from_yaml() assert result is None @@ -91,6 +148,16 @@ def test_get_lib_path(charm: str, lib: str, api: int, expected: pathlib.Path): assert get_lib_path(charm, lib, api) == expected +@pytest.mark.parametrize( + ("charm", "expected"), + [ + ("my-charm", pathlib.Path("lib/charms/my_charm")), + ], +) +def test_get_lib_charm_path(charm: str, expected: pathlib.Path): + assert get_lib_charm_path(charm) == expected + + @pytest.mark.parametrize( ("charm", "lib", "api", "expected"), [ @@ -162,6 +229,23 @@ def test_getlibinfo_success_simple(tmp_path, monkeypatch): assert lib_data.charm_name == "testcharm" +def test_getlibinfo_success_absolute_path(tmp_path, monkeypatch): + """Simple basic case of success getting info from 
the library.""" + monkeypatch.chdir(tmp_path) + test_path = _create_lib() + + lib_data = get_lib_info(lib_path=test_path.absolute()) + assert lib_data.lib_id == "test-lib-id" + assert lib_data.api == 3 + assert lib_data.patch == 14 + assert lib_data.content_hash is not None + assert lib_data.content is not None + assert lib_data.full_name == "charms.testcharm.v3.testlib" + assert lib_data.path == test_path.absolute() + assert lib_data.lib_name == "testlib" + assert lib_data.charm_name == "testcharm" + + @pytest.mark.parametrize( "name", [ @@ -237,7 +321,10 @@ def test_getlibinfo_missing_library_from_name(): assert lib_data.content_hash is None assert lib_data.content is None assert lib_data.full_name == test_name - assert lib_data.path == pathlib.Path("lib") / "charms" / "testcharm" / "v3" / "testlib.py" + assert ( + lib_data.path + == pathlib.Path("lib") / "charms" / "testcharm" / "v3" / "testlib.py" + ) assert lib_data.lib_name == "testlib" assert lib_data.charm_name == "testcharm" @@ -312,13 +399,18 @@ def test_getlibinternals_success_content(tmp_path, monkeypatch): internals = get_lib_internals(test_path) assert internals.content == test_path.read_text(encoding="utf8") - assert internals.content_hash == hashlib.sha256(extra_content.encode("utf8")).hexdigest() + assert ( + internals.content_hash + == hashlib.sha256(extra_content.encode("utf8")).hexdigest() + ) def test_getlibinternals_non_toplevel_names(tmp_path, monkeypatch): """Test non direct assignments.""" monkeypatch.chdir(tmp_path) - test_path = _create_lib(extra_content="logging.getLogger('kazoo.client').disabled = True") + test_path = _create_lib( + extra_content="logging.getLogger('kazoo.client').disabled = True" + ) internals = get_lib_internals(test_path) assert internals.lib_id == "test-lib-id" @@ -349,7 +441,9 @@ def test_getlibinternals_malformed_content(tmp_path, monkeypatch): (["metadata_patch", "metadata_id"], "LIBID, LIBPATCH"), ], ) -def test_getlibinternals_missing_internals_field(tmp_path, empty_args, missing, monkeypatch): +def test_getlibinternals_missing_internals_field( + tmp_path, empty_args, missing, monkeypatch +): """Some internals field is not present.""" monkeypatch.chdir(tmp_path) kwargs = {arg: "" for arg in empty_args} @@ -512,7 +606,9 @@ def test_collectpydeps_generic(tmp_path, monkeypatch): otherdir = tmp_path / "otherdir" otherdir.mkdir() monkeypatch.chdir(otherdir) - _create_lib(charm_name="charm1", lib_name="lib1.py", pydeps="PYDEPS = ['foo', 'bar']") + _create_lib( + charm_name="charm1", lib_name="lib1.py", pydeps="PYDEPS = ['foo', 'bar']" + ) _create_lib(charm_name="charm1", lib_name="lib2.py", pydeps="PYDEPS = ['bar']") _create_lib(charm_name="charm2", lib_name="lib3.py") _create_lib(charm_name="charm2", lib_name="lib3.py", pydeps="PYDEPS = ['baz']") diff --git a/tests/unit/utils/test_cli.py b/tests/unit/utils/test_cli.py index 971f36795..c439bdb1a 100644 --- a/tests/unit/utils/test_cli.py +++ b/tests/unit/utils/test_cli.py @@ -14,6 +14,7 @@ # # For further info, check https://github.com/canonical/charmcraft """Unit tests for CLI-related utilities.""" + import datetime import json from unittest.mock import call, patch @@ -171,7 +172,9 @@ def test_confirm_with_user(user_input, expected, mock_input, mock_isatty): assert mock_input.mock_calls == [call("prompt [y/N]: ")] -def test_confirm_with_user_errors_in_managed_mode(mock_is_charmcraft_running_in_managed_mode): +def test_confirm_with_user_errors_in_managed_mode( + mock_is_charmcraft_running_in_managed_mode, +): 
    mock_is_charmcraft_running_in_managed_mode.return_value = True

    with pytest.raises(RuntimeError):
diff --git a/tests/unit/utils/test_file.py b/tests/unit/utils/test_file.py
index 04efda2cc..8652281f1 100644
--- a/tests/unit/utils/test_file.py
+++ b/tests/unit/utils/test_file.py
@@ -14,6 +14,7 @@
#
# For further info, check https://github.com/canonical/charmcraft
"""Unit tests for file-related utilities."""
+
import os
import pathlib
import sys
diff --git a/tests/unit/utils/test_package.py b/tests/unit/utils/test_package.py
index 8bfe73afb..2adf26dcc 100644
--- a/tests/unit/utils/test_package.py
+++ b/tests/unit/utils/test_package.py
@@ -65,8 +65,12 @@ def test_get_package_names(packages, expected):
    [
        pytest.param(set(), set(), set(), id="empty"),
        pytest.param({"abc==1.0.0"}, {"abc"}, set(), id="make-empty"),
-        pytest.param({"abc==1.0.0", "def==1.2.3"}, {"abc"}, {"def==1.2.3"}, id="remove-one"),
-        pytest.param({"abc==1.0.0"}, {"invalid"}, {"abc==1.0.0"}, id="irrelevant-exclusion"),
+        pytest.param(
+            {"abc==1.0.0", "def==1.2.3"}, {"abc"}, {"def==1.2.3"}, id="remove-one"
+        ),
+        pytest.param(
+            {"abc==1.0.0"}, {"invalid"}, {"abc==1.0.0"}, id="irrelevant-exclusion"
+        ),
    ],
)
def test_exclude_packages(requirements, excluded, expected):
@@ -110,20 +114,37 @@ def test_get_requirements_file_package_names(tmp_path, file_contents, expected):
            ["ghi", "jkl"],
        ),
        (["abc==1.0.0", "def>=1.2.3"], [], [], "--no-binary=:all:", []),
-        ([], ["abc==1.0.0", "def>=1.2.3"], [], "--no-binary=:all:", ["abc==1.0.0", "def>=1.2.3"]),
+        (
+            [],
+            ["abc==1.0.0", "def>=1.2.3"],
+            [],
+            "--no-binary=:all:",
+            ["abc==1.0.0", "def>=1.2.3"],
+        ),
    ],
)
-@pytest.mark.parametrize("prefix", [["/bin/pip"], ["/some/path/to/pip3"], ["pip", "--some-param"]])
+@pytest.mark.parametrize(
+    "prefix", [["/bin/pip"], ["/some/path/to/pip3"], ["pip", "--some-param"]]
+)
def test_get_pip_command(
-    prefix, requirements, source_deps, binary_deps, expected_no_binary, expected_other_packages
+    prefix,
+    requirements,
+    source_deps,
+    binary_deps,
+    expected_no_binary,
+    expected_other_packages,
):
    with tempfile.TemporaryDirectory() as tmp_dir:
        path = pathlib.Path(tmp_dir, "requirements.txt")
        path.write_text("\n".join(requirements))

-        command = get_pip_command(prefix, [path], source_deps=source_deps, binary_deps=binary_deps)
+        command = get_pip_command(
+            prefix, [path], source_deps=source_deps, binary_deps=binary_deps
+        )

        assert command[: len(prefix)] == prefix
-        actual_no_binary, actual_requirement, *actual_other_packages = command[len(prefix) :]
+        actual_no_binary, actual_requirement, *actual_other_packages = command[
+            len(prefix) :
+        ]
        assert actual_no_binary == expected_no_binary
        assert actual_other_packages == expected_other_packages
        assert actual_requirement == f"--requirement={path}"
@@ -131,7 +152,18 @@ def test_get_pip_command(

@pytest.mark.parametrize(
    ("pip_cmd", "stdout", "expected"),
-    [("pip", "pip 22.0.2 from /usr/lib/python3/dist-packages/pip (python 3.10)\n", (22, 0, 2))],
+    [
+        (
+            "pip",
+            "pip 22.0.2 from /usr/lib/python3/dist-packages/pip (python 3.10)\n",
+            (22, 0, 2),
+        ),
+        (
+            "venv/bin/pip",
+            "pip 20.0.2 from /root/venv/lib/python3.8/site-packages/pip (python 3.8)",
+            (20, 0, 2),
+        ),
+    ],
)
def test_get_pip_version_success(
    fake_process,
@@ -180,7 +212,9 @@ def test_validate_strict_dependencies_success(dependencies, other_packages):
        ([], ["zyx", "wvut"], ["wvut", "zyx"]),
    ],
)
-def test_validate_strict_dependencies_missing(dependencies, other_packages, extra_packages):
+def test_validate_strict_dependencies_missing(
dependencies, other_packages, extra_packages +): with pytest.raises(MissingDependenciesError) as exc_info: validate_strict_dependencies(dependencies, other_packages) diff --git a/tests/unit/utils/test_platform.py b/tests/unit/utils/test_platform.py index 2f342e338..8815c4735 100644 --- a/tests/unit/utils/test_platform.py +++ b/tests/unit/utils/test_platform.py @@ -101,11 +101,15 @@ ], ) @pytest.mark.parametrize("machine", ["x86_64", "riscv64", "arm64"]) -def test_get_os_platform_linux(tmp_path, os_release, expected_system, expected_release, machine): +def test_get_os_platform_linux( + tmp_path, os_release, expected_system, expected_release, machine +): """Utilize an /etc/os-release file to determine platform.""" filepath = tmp_path / "os-release" filepath.write_text(os_release) - with patch("distro.distro._distro", distro.LinuxDistribution(os_release_file=filepath)): + with patch( + "distro.distro._distro", distro.LinuxDistribution(os_release_file=filepath) + ): with patch("platform.machine", return_value=machine): with patch("platform.system", return_value="Linux"): os_platform = get_os_platform(filepath) @@ -126,7 +130,9 @@ def test_get_os_platform_non_linux(system, release, machine): assert os_platform == OSPlatform(system, release, machine) -@given(strategies.iterables(strategies.sampled_from(sorted(const.SUPPORTED_ARCHITECTURES)))) +@given( + strategies.iterables(strategies.sampled_from(sorted(const.SUPPORTED_ARCHITECTURES))) +) def test_validate_architectures_valid_values(architectures): validate_architectures(architectures) diff --git a/tests/unit/utils/test_project.py b/tests/unit/utils/test_project.py index 7960a76e7..80db5f09b 100644 --- a/tests/unit/utils/test_project.py +++ b/tests/unit/utils/test_project.py @@ -75,8 +75,13 @@ def test_find_charm_sources_extra_charms(tmp_path, build_charm_directory, fake_c @pytest.mark.parametrize("fake_charms", [BASIC_CHARM_MAP]) -def test_find_charm_sources_non_matching_path(tmp_path, build_charm_directory, fake_charms): - charms = {name: path.with_name(f"non_matching_{name}") for name, path in fake_charms.items()} +def test_find_charm_sources_non_matching_path( + tmp_path, build_charm_directory, fake_charms +): + charms = { + name: path.with_name(f"non_matching_{name}") + for name, path in fake_charms.items() + } build_charm_directory(tmp_path, charms) actual = find_charm_sources(tmp_path, fake_charms) @@ -137,7 +142,10 @@ def test_get_charm_name_from_path_bundle(tmp_path, build_charm_directory, name, with pytest.raises(InvalidCharmPathError) as exc_info: get_charm_name_from_path(full_path) - assert exc_info.value.args[0] == f"Path does not contain source for a valid charm: {full_path}" + assert ( + exc_info.value.args[0] + == f"Path does not contain source for a valid charm: {full_path}" + ) @pytest.mark.parametrize( @@ -158,7 +166,10 @@ def test_get_charm_name_from_path_missing_file( with pytest.raises(InvalidCharmPathError) as exc_info: get_charm_name_from_path(full_path) - assert exc_info.value.args[0] == f"Path does not contain source for a valid charm: {full_path}" + assert ( + exc_info.value.args[0] + == f"Path does not contain source for a valid charm: {full_path}" + ) @pytest.mark.parametrize( @@ -169,7 +180,9 @@ def test_get_charm_name_from_path_missing_file( ("test1", "operators/test1"), ], ) -def test_get_charm_name_from_path_wrong_name(tmp_path, build_charm_directory, name, path): +def test_get_charm_name_from_path_wrong_name( + tmp_path, build_charm_directory, name, path +): build_charm_directory(tmp_path, {name: path}, 
file_type="bundle") full_path = tmp_path / path with (full_path / const.METADATA_FILENAME).open("w") as file: @@ -178,4 +191,7 @@ def test_get_charm_name_from_path_wrong_name(tmp_path, build_charm_directory, na with pytest.raises(InvalidCharmPathError) as exc_info: get_charm_name_from_path(full_path) - assert exc_info.value.args[0] == f"Path does not contain source for a valid charm: {full_path}" + assert ( + exc_info.value.args[0] + == f"Path does not contain source for a valid charm: {full_path}" + ) diff --git a/tests/unit/utils/test_skopeo.py b/tests/unit/utils/test_skopeo.py index 45ce80c99..31c74afaa 100644 --- a/tests/unit/utils/test_skopeo.py +++ b/tests/unit/utils/test_skopeo.py @@ -66,7 +66,9 @@ def test_find_skopeo_success(fake_process): ("kwargs", "expected"), [ pytest.param({}, [], id="empty"), - pytest.param({"insecure_policy": True}, ["--insecure-policy"], id="insecure_policy"), + pytest.param( + {"insecure_policy": True}, ["--insecure-policy"], id="insecure_policy" + ), pytest.param({"arch": "amd64"}, ["--override-arch", "amd64"], id="amd64"), pytest.param({"arch": "arm64"}, ["--override-arch", "arm64"], id="arm64"), pytest.param({"arch": "riscv64"}, ["--override-arch", "riscv64"], id="riscv64"), @@ -103,14 +105,25 @@ def fake_skopeo(fake_process): ({"preserve_digests": True}, ["--preserve-digests"]), ({"source_username": "user"}, ["--src-creds", "user"]), ({"source_password": "pass"}, ["--src-password", "pass"]), - ({"source_username": "user", "source_password": "pass"}, ["--src-creds", "user:pass"]), + ( + {"source_username": "user", "source_password": "pass"}, + ["--src-creds", "user:pass"], + ), ({"dest_username": "user"}, ["--dest-creds", "user"]), ({"dest_password": "pass"}, ["--dest-password", "pass"]), - ({"dest_username": "user", "dest_password": "pass"}, ["--dest-creds", "user:pass"]), + ( + {"dest_username": "user", "dest_password": "pass"}, + ["--dest-creds", "user:pass"], + ), ], ) def test_get_copy_command( - fake_process, fake_skopeo: Skopeo, source_image, destination_image, kwargs, expected_args + fake_process, + fake_skopeo: Skopeo, + source_image, + destination_image, + kwargs, + expected_args, ): fake_process.register( [ diff --git a/tests/unit/utils/test_store.py b/tests/unit/utils/test_store.py index 330ea3780..09b59bf67 100644 --- a/tests/unit/utils/test_store.py +++ b/tests/unit/utils/test_store.py @@ -14,12 +14,16 @@ # # For further info, check https://github.com/canonical/charmcraft """Tests for store helpers.""" + from hypothesis import given, strategies from charmcraft import utils -@given(charms=strategies.lists(strategies.text()), bundles=strategies.lists(strategies.text())) +@given( + charms=strategies.lists(strategies.text()), + bundles=strategies.lists(strategies.text()), +) def test_get_packages(charms, bundles): packages = utils.get_packages(charms=charms, bundles=bundles) result_names = [package.package_name for package in packages] diff --git a/tools/freeze-requirements.sh b/tools/freeze-requirements.sh index 39113e123..36c813be7 100755 --- a/tools/freeze-requirements.sh +++ b/tools/freeze-requirements.sh @@ -9,7 +9,7 @@ requirements_fixups() { venv_dir="$(mktemp -d)" -python3 -m venv "$venv_dir" +uv venv --python=3.10 "$venv_dir" # shellcheck source=/dev/null . "$venv_dir/bin/activate" @@ -22,12 +22,12 @@ dpkg -x ./*.deb . cp -r usr/lib/python3/dist-packages/* "$site_pkgs" popd -pip install -e . -pip freeze --exclude-editable > requirements.txt +uv pip install -e . 
+uv pip freeze --exclude-editable > requirements.txt requirements_fixups "requirements.txt" -pip install -e .[dev] -pip freeze --exclude-editable > requirements-dev.txt +uv pip install -e .[dev] +uv pip freeze --exclude-editable > requirements-dev.txt requirements_fixups "requirements-dev.txt" rm -rf "$venv_dir" diff --git a/tox.ini b/tox.ini index 483817d33..ca84fba42 100644 --- a/tox.ini +++ b/tox.ini @@ -3,9 +3,9 @@ # 1. Docs not included # 2. Charmcraft currently doesn't distinguish unit from integration tests env_list = # Environments to run when called with no parameters. - format-{black,ruff,codespell} + format-{ruff,codespell} pre-commit - lint-{black,ruff,mypy,pyright,shellcheck,codespell,yaml} + lint-{ruff,mypy,pyright,shellcheck,codespell,yaml} test-py3.10 # By default, only run tests on core22's Python 3.10 minversion = 4.6 @@ -65,7 +65,7 @@ runner = ignore_env_name_mismatch find = git ls-files filter = file --mime-type -Nnf- | grep shellscript | cut -f1 -d: -[testenv:lint-{black,ruff,shellcheck,codespell,yaml}] +[testenv:lint-{ruff,shellcheck,codespell,yaml}] description = Lint the source code base = testenv, lint labels = lint @@ -75,8 +75,8 @@ allowlist_externals = commands_pre = shellcheck: bash -c '{[shellcheck]find} | {[shellcheck]filter} > {env_tmp_dir}/shellcheck_files' commands = - black: black --check --diff {tty:--color} {posargs} . ruff: ruff check --respect-gitignore {posargs:.} + ruff: ruff format --diff {posargs:.} shellcheck: xargs -ra {env_tmp_dir}/shellcheck_files shellcheck codespell: codespell --toml {tox_root}/pyproject.toml {posargs} yaml: yamllint {posargs} . @@ -95,15 +95,15 @@ commands = pyright: pyright {posargs} mypy: mypy --install-types --non-interactive {posargs} -[testenv:format-{black,ruff,codespell}] +[testenv:format-{ruff,codespell}] description = Automatically format source code base = testenv, lint labels = format allowlist_externals = ruff: ruff commands = - black: black {tty:--color} {posargs} . ruff: ruff check --fix --respect-gitignore {posargs:.} + ruff: ruff format {posargs:.} codespell: codespell --toml {tox_root}/pyproject.toml --write-changes {posargs} [testenv:pre-commit]
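
Note: the new tests/unit/services/test_provider.py cases earlier in this diff exercise a _maybe_lock_cache helper whose implementation is not part of the patch. As a rough illustration only, not the actual charmcraft code, a helper with the behaviour those tests expect (a truthy file descriptor when the lock is taken, None when another process already holds it) could look roughly like the sketch below; the lock-file name and the absence of any per-path caching are invented assumptions here.

# Hedged sketch, not the real charmcraft.services.provider implementation.
import fcntl
import os
import pathlib


def _maybe_lock_cache(path: pathlib.Path) -> int | None:
    """Try to take an exclusive, non-blocking lock on the shared cache directory.

    Returns the open file descriptor holding the lock, or None if another
    process (or another descriptor) already holds it.
    """
    lock_file = path / ".cache.lock"  # placeholder name, not taken from the diff
    fd = os.open(lock_file, os.O_RDWR | os.O_CREAT, 0o600)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        # Lock is held elsewhere; the caller falls back to running without cache.
        os.close(fd)
        return None
    return fd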