diff --git a/.distro/python-scikit-build-core.spec b/.distro/python-scikit-build-core.spec index 50aaeedc..3298459e 100644 --- a/.distro/python-scikit-build-core.spec +++ b/.distro/python-scikit-build-core.spec @@ -13,7 +13,10 @@ Version: 0.0.0 Release: %autorelease Summary: Build backend for CMake based projects -License: Apache-2.0 +# The main project is licensed under Apache-2.0, but it has a vendored project +# src/scikit_build_core/_vendor/pyproject_metadata: MIT +# https://github.com/scikit-build/scikit-build-core/issues/933 +License: Apache-2.0 AND MIT URL: https://github.com/scikit-build/scikit-build-core Source: %{pypi_source scikit_build_core} @@ -38,6 +41,7 @@ Recommends: (ninja-build or make) Recommends: python3-scikit-build-core+pyproject = %{version}-%{release} Suggests: ninja-build Suggests: gcc +Provides: bundled(python3dist(pyproject-metadata)) BuildArch: noarch %description -n python3-scikit-build-core %_description @@ -63,6 +67,8 @@ It makes sure the dependencies are installed. %prep %autosetup -n scikit_build_core-%{version} +# Rename the bundled license so that it can be installed together +cp -p src/scikit_build_core/_vendor/pyproject_metadata/LICENSE LICENSE-pyproject-metadata %generate_buildrequires @@ -87,7 +93,7 @@ It makes sure the dependencies are installed. %files -n python3-scikit-build-core -f %{pyproject_files} -%license LICENSE +%license LICENSE LICENSE-pyproject-metadata %doc README.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fb3124cd..ecaa54ab 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -82,7 +82,7 @@ repos: - ninja - nox - orjson - - packaging + - packaging>=24.2 - pytest - pytest-subprocess - rich diff --git a/README.md b/README.md index a4b8e15b..15110891 100644 --- a/README.md +++ b/README.md @@ -236,8 +236,10 @@ wheel.expand-macos-universal-tags = false # to "/platlib", "/data", "/headers", and "/scripts". wheel.install-dir = "" -# A list of license files to include in the wheel. Supports glob patterns. -wheel.license-files = ["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"] +# A list of license files to include in the wheel. Supports glob patterns. The +# default is ``["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"]``. Must not be +# set if ``project.license-files`` is set. +wheel.license-files = "" # If set to True (the default), CMake will be run before building the wheel. 
wheel.cmake = true diff --git a/noxfile.py b/noxfile.py index 311b0d71..f56256a1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -41,7 +41,7 @@ def pylint(session: nox.Session) -> None: """ # This needs to be installed into the package environment, and is slower # than a pre-commit check - session.install("-e.[dev,test,test-meta]", "pylint==3.2.*") + session.install("-e.[dev,test,test-meta]", "pylint==3.3.*") session.run("pylint", "--version") session.run("pylint", "scikit_build_core", *session.posargs) diff --git a/pyproject.toml b/pyproject.toml index da240ddf..64e2d14d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,6 +144,7 @@ filterwarnings = [ "default:The distutils package is deprecated and slated for removal:DeprecationWarning", # Caused by setuptools sometimes "default:The distutils.sysconfig module is deprecated, use sysconfig instead:DeprecationWarning", # Caused by setuptools sometimes "default:check_home argument is deprecated and ignored.:DeprecationWarning", # Caused by setuptools sometimes + "ignore::scikit_build_core._vendor.pyproject_metadata.errors.ConfigurationWarning", ] log_cli_level = "info" pythonpath = ["tests/utils"] @@ -203,6 +204,7 @@ messages_control.disable = [ "broad-except", "unused-argument", # Handled by Ruff "redefined-builtin", # ExceptionGroup is a builtin + "using-exception-groups-in-unsupported-version", # We are using a backport ] diff --git a/src/scikit_build_core/_vendor/pyproject_metadata/__init__.py b/src/scikit_build_core/_vendor/pyproject_metadata/__init__.py index 7ebc91dd..f12b26bf 100644 --- a/src/scikit_build_core/_vendor/pyproject_metadata/__init__.py +++ b/src/scikit_build_core/_vendor/pyproject_metadata/__init__.py @@ -1,17 +1,54 @@ # SPDX-License-Identifier: MIT +""" +This is pyproject_metadata, a library for working with PEP 621 metadata. + +Example usage: + +.. code-block:: python + + from pyproject_metadata import StandardMetadata + + metadata = StandardMetadata.from_pyproject( + parsed_pyproject, allow_extra_keys=False, all_errors=True, metadata_version="2.3" + ) + + pkg_info = metadata.as_rfc822() + with open("METADATA", "wb") as f: + f.write(pkg_info.as_bytes()) + + ep = self.metadata.entrypoints.copy() + ep["console_scripts"] = self.metadata.scripts + ep["gui_scripts"] = self.metadata.gui_scripts + for group, entries in ep.items(): + if entries: + with open("entry_points.txt", "w", encoding="utf-8") as f: + print(f"[{group}]", file=f) + for name, target in entries.items(): + print(f"{name} = {target}", file=f) + print(file=f) + +""" + from __future__ import annotations -import collections import copy import dataclasses +import email.message +import email.policy import email.utils import os import os.path import pathlib +import re import sys import typing +import warnings +# Build backends may vendor this package, so all imports are relative. +from . 
import constants +from .errors import ConfigurationError, ConfigurationWarning, ErrorCollector +from .pyproject import License, PyProjectReader, Readme if typing.TYPE_CHECKING: from collections.abc import Mapping @@ -24,519 +61,593 @@ else: from typing import Self + from .project_table import Dynamic, PyProjectTable + import packaging.markers -import packaging.requirements import packaging.specifiers import packaging.utils import packaging.version +__version__ = "0.9.0" -__version__ = '0.8.0' +__all__ = [ + "ConfigurationError", + "License", + "RFC822Message", + "RFC822Policy", + "Readme", + "StandardMetadata", + "field_to_metadata", + "extras_build_system", + "extras_project", + "extras_top_level", +] -KNOWN_METADATA_VERSIONS = {'2.1', '2.2', '2.3'} +def __dir__() -> list[str]: + return __all__ -class ConfigurationError(Exception): - '''Error in the backend metadata.''' - def __init__(self, msg: str, *, key: str | None = None): - super().__init__(msg) - self._key = key - @property - def key(self) -> str | None: # pragma: no cover - return self._key +def field_to_metadata(field: str) -> frozenset[str]: + """ + Return the METADATA fields that correspond to a project field. + """ + return frozenset(constants.PROJECT_TO_METADATA[field]) -class RFC822Message: - '''Python-flavored RFC 822 message implementation.''' +def extras_top_level(pyproject_table: Mapping[str, Any]) -> set[str]: + """ + Return any extra keys in the top-level of the pyproject table. + """ + return set(pyproject_table) - constants.KNOWN_TOPLEVEL_FIELDS + + +def extras_build_system(pyproject_table: Mapping[str, Any]) -> set[str]: + """ + Return any extra keys in the build-system table. + """ + return ( + set(pyproject_table.get("build-system", [])) + - constants.KNOWN_BUILD_SYSTEM_FIELDS + ) - def __init__(self) -> None: - self.headers: collections.OrderedDict[str, list[str]] = collections.OrderedDict() - self.body: str | None = None + +def extras_project(pyproject_table: Mapping[str, Any]) -> set[str]: + """ + Return any extra keys in the project table. + """ + return set(pyproject_table.get("project", [])) - constants.KNOWN_PROJECT_FIELDS + + +@dataclasses.dataclass +class _SmartMessageSetter: + """ + This provides a nice internal API for setting values in an Message to + reduce boilerplate. + + If a value is None, do nothing. 
+ """ + + message: email.message.Message def __setitem__(self, name: str, value: str | None) -> None: if not value: return - if name not in self.headers: - self.headers[name] = [] - self.headers[name].append(value) - - def __str__(self) -> str: - text = '' - for name, entries in self.headers.items(): - for entry in entries: - lines = entry.strip('\n').split('\n') - text += f'{name}: {lines[0]}\n' - for line in lines[1:]: - text += ' ' * 8 + line + '\n' - if self.body: - text += '\n' + self.body - return text - - def __bytes__(self) -> bytes: - return str(self).encode() - - -class DataFetcher: - def __init__(self, data: Mapping[str, Any]) -> None: - self._data = data - - def __contains__(self, key: Any) -> bool: - if not isinstance(key, str): - return False - val = self._data - try: - for part in key.split('.'): - val = val[part] - except KeyError: - return False - return True - - def get(self, key: str) -> Any: - val = self._data - for part in key.split('.'): - val = val[part] - return val - - def get_str(self, key: str) -> str | None: - try: - val = self.get(key) - if not isinstance(val, str): - msg = f'Field "{key}" has an invalid type, expecting a string (got "{val}")' - raise ConfigurationError(msg, key=key) - return val - except KeyError: - return None - - def get_list(self, key: str) -> list[str]: - try: - val = self.get(key) - if not isinstance(val, list): - msg = f'Field "{key}" has an invalid type, expecting a list of strings (got "{val}")' - raise ConfigurationError(msg, key=val) - for item in val: - if not isinstance(item, str): - msg = f'Field "{key}" contains item with invalid type, expecting a string (got "{item}")' - raise ConfigurationError(msg, key=key) - return val - except KeyError: - return [] - - def get_dict(self, key: str) -> dict[str, str]: - try: - val = self.get(key) - if not isinstance(val, dict): - msg = f'Field "{key}" has an invalid type, expecting a dictionary of strings (got "{val}")' - raise ConfigurationError(msg, key=key) - for subkey, item in val.items(): - if not isinstance(item, str): - msg = f'Field "{key}.{subkey}" has an invalid type, expecting a string (got "{item}")' - raise ConfigurationError(msg, key=f'{key}.{subkey}') - return val - except KeyError: - return {} - - def get_people(self, key: str) -> list[tuple[str, str]]: - try: - val = self.get(key) - if not ( - isinstance(val, list) - and all(isinstance(x, dict) for x in val) - and all( - isinstance(item, str) - for items in [_dict.values() for _dict in val] - for item in items - ) - ): - msg = ( - f'Field "{key}" has an invalid type, expecting a list of ' - f'dictionaries containing the "name" and/or "email" keys (got "{val}")' - ) - raise ConfigurationError(msg, key=key) - return [ - (entry.get('name', 'Unknown'), entry.get('email')) - for entry in val - ] - except KeyError: - return [] + self.message[name] = value + + def set_payload(self, payload: str) -> None: + self.message.set_payload(payload) + + +@dataclasses.dataclass +class _JSonMessageSetter: + """ + This provides an API to build a JSON message output in the same way as the + classic Message. Line breaks are preserved this way. 
+ """ + + data: dict[str, str | list[str]] + + def __setitem__(self, name: str, value: str | None) -> None: + name = name.lower() + key = name.replace("-", "_") + + if value is None: + return + + if name == "keywords": + values = (x.strip() for x in value.split(",")) + self.data[key] = [x for x in values if x] + elif name in constants.KNOWN_MULTIUSE: + entry = self.data.setdefault(key, []) + assert isinstance(entry, list) + entry.append(value) + else: + self.data[key] = value + + def set_payload(self, payload: str) -> None: + self["description"] = payload -class License(typing.NamedTuple): - text: str - file: pathlib.Path | None +class RFC822Policy(email.policy.EmailPolicy): + """ + This is :class:`email.policy.EmailPolicy`, but with a simple ``header_store_parse`` + implementation that handles multiline values, and some nice defaults. + """ + utf8 = True + mangle_from_ = False + max_line_length = 0 -class Readme(typing.NamedTuple): - text: str - file: pathlib.Path | None - content_type: str + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + if name.lower() not in constants.KNOWN_METADATA_FIELDS: + msg = f"Unknown field {name!r}" + raise ConfigurationError(msg, key=name) + size = len(name) + 2 + value = value.replace("\n", "\n" + " " * size) + return (name, value) + + +class RFC822Message(email.message.EmailMessage): + """ + This is :class:`email.message.EmailMessage` with two small changes: it defaults to + our `RFC822Policy`, and it correctly writes unicode when being called + with `bytes()`. + """ + + def __init__(self) -> None: + super().__init__(policy=RFC822Policy()) + + def as_bytes( + self, unixfrom: bool = False, policy: email.policy.Policy | None = None + ) -> bytes: + """ + This handles unicode encoding. + """ + return self.as_string(unixfrom, policy=policy).encode("utf-8") @dataclasses.dataclass class StandardMetadata: + """ + This class represents the standard metadata fields for a project. It can be + used to read metadata from a pyproject.toml table, validate it, and write it + to an RFC822 message or JSON. 
+ """ + name: str version: packaging.version.Version | None = None description: str | None = None - license: License | None = None + license: License | str | None = None + license_files: list[pathlib.Path] | None = None readme: Readme | None = None requires_python: packaging.specifiers.SpecifierSet | None = None dependencies: list[Requirement] = dataclasses.field(default_factory=list) - optional_dependencies: dict[str, list[Requirement]] = dataclasses.field(default_factory=dict) + optional_dependencies: dict[str, list[Requirement]] = dataclasses.field( + default_factory=dict + ) entrypoints: dict[str, dict[str, str]] = dataclasses.field(default_factory=dict) - authors: list[tuple[str, str]] = dataclasses.field(default_factory=list) - maintainers: list[tuple[str, str]] = dataclasses.field(default_factory=list) + authors: list[tuple[str, str | None]] = dataclasses.field(default_factory=list) + maintainers: list[tuple[str, str | None]] = dataclasses.field(default_factory=list) urls: dict[str, str] = dataclasses.field(default_factory=dict) classifiers: list[str] = dataclasses.field(default_factory=list) keywords: list[str] = dataclasses.field(default_factory=list) scripts: dict[str, str] = dataclasses.field(default_factory=dict) gui_scripts: dict[str, str] = dataclasses.field(default_factory=dict) - dynamic: list[str] = dataclasses.field(default_factory=list) - - _metadata_version: str | None = None + dynamic: list[Dynamic] = dataclasses.field(default_factory=list) + """ + This field is used to track dynamic fields. You can't set a field not in this list. + """ + + dynamic_metadata: list[str] = dataclasses.field(default_factory=list) + """ + This is a list of METADATA fields that can change in between SDist and wheel. Requires metadata_version 2.2+. + """ + metadata_version: str | None = None + """ + This is the target metadata version. If None, it will be computed as a minimum based on the fields set. + """ + all_errors: bool = False + """ + If True, all errors will be collected and raised in an ExceptionGroup. + """ + + def __post_init__(self) -> None: + self.validate() @property - def metadata_version(self) -> str: - if self._metadata_version is None: - return '2.2' if self.dynamic else '2.1' - return self._metadata_version + def auto_metadata_version(self) -> str: + """ + This computes the metadata version based on the fields set in the object + if ``metadata_version`` is None. + """ + if self.metadata_version is not None: + return self.metadata_version + + if isinstance(self.license, str) or self.license_files is not None: + return "2.4" + if self.dynamic_metadata: + return "2.2" + return "2.1" @property def canonical_name(self) -> str: + """ + Return the canonical name of the project. + """ return packaging.utils.canonicalize_name(self.name) @classmethod - def from_pyproject( + def from_pyproject( # noqa: C901 cls, data: Mapping[str, Any], project_dir: str | os.PathLike[str] = os.path.curdir, metadata_version: str | None = None, + dynamic_metadata: list[str] | None = None, + *, + allow_extra_keys: bool | None = None, + all_errors: bool = False, ) -> Self: - fetcher = DataFetcher(data) + """ + Read metadata from a pyproject.toml table. This is the main method for + creating an instance of this class. It also supports two additional + fields: ``allow_extra_keys`` to control what happens when extra keys are + present in the pyproject table, and ``all_errors``, to raise all errors + in an ExceptionGroup instead of raising the first one. 
+ """ + pyproject = PyProjectReader(collect_errors=all_errors) + + pyproject_table: PyProjectTable = data # type: ignore[assignment] + if "project" not in pyproject_table: + msg = "Section {key} missing in pyproject.toml" + pyproject.config_error(msg, key="project") + pyproject.finalize("Failed to parse pyproject.toml") + msg = "Unreachable code" # pragma: no cover + raise AssertionError(msg) # pragma: no cover + + project = pyproject_table["project"] project_dir = pathlib.Path(project_dir) - if 'project' not in fetcher: - msg = 'Section "project" missing in pyproject.toml' - raise ConfigurationError(msg) + if not allow_extra_keys: + extra_keys = extras_project(data) + if extra_keys: + extra_keys_str = ", ".join(sorted(f"{k!r}" for k in extra_keys)) + msg = "Extra keys present in {key}: {extra_keys}" + pyproject.config_error( + msg, + key="project", + extra_keys=extra_keys_str, + warn=allow_extra_keys is None, + ) - dynamic = fetcher.get_list('project.dynamic') - if 'name' in dynamic: - msg = 'Unsupported field "name" in "project.dynamic"' - raise ConfigurationError(msg) + dynamic = pyproject.get_dynamic(project) for field in dynamic: - if field in data['project']: - msg = f'Field "project.{field}" declared as dynamic in "project.dynamic" but is defined' - raise ConfigurationError(msg) - - name = fetcher.get_str('project.name') - if not name: - msg = 'Field "project.name" missing' - raise ConfigurationError(msg) - - version_string = fetcher.get_str('project.version') - requires_python_string = fetcher.get_str('project.requires-python') - version = packaging.version.Version(version_string) if version_string else None - - if version is None and 'version' not in dynamic: - msg = 'Field "project.version" missing and "version" not specified in "project.dynamic"' - raise ConfigurationError(msg) + if field in data["project"]: + msg = 'Field {key} declared as dynamic in "project.dynamic" but is defined' + pyproject.config_error(msg, key=f"project.{field}") + + raw_name = project.get("name") + name = "UNKNOWN" + if raw_name is None: + msg = "Field {key} missing" + pyproject.config_error(msg, key="project.name") + else: + tmp_name = pyproject.ensure_str(raw_name, "project.name") + if tmp_name is not None: + name = tmp_name + + version: packaging.version.Version | None = packaging.version.Version("0.0.0") + raw_version = project.get("version") + if raw_version is not None: + version_string = pyproject.ensure_str(raw_version, "project.version") + if version_string is not None: + try: + version = ( + packaging.version.Version(version_string) + if version_string + else None + ) + except packaging.version.InvalidVersion: + msg = "Invalid {key} value, expecting a valid PEP 440 version" + pyproject.config_error( + msg, key="project.version", got=version_string + ) + elif "version" not in dynamic: + msg = ( + "Field {key} missing and 'version' not specified in \"project.dynamic\"" + ) + pyproject.config_error(msg, key="project.version") # Description fills Summary, which cannot be multiline # However, throwing an error isn't backward compatible, # so leave it up to the users for now. 
- description = fetcher.get_str('project.description') - - if metadata_version and metadata_version not in KNOWN_METADATA_VERSIONS: - msg = f'The metadata_version must be one of {KNOWN_METADATA_VERSIONS} or None (default)' - raise ConfigurationError(msg) - - return cls( - name, - version, - description, - cls._get_license(fetcher, project_dir), - cls._get_readme(fetcher, project_dir), - packaging.specifiers.SpecifierSet(requires_python_string) if requires_python_string else None, - cls._get_dependencies(fetcher), - cls._get_optional_dependencies(fetcher), - cls._get_entrypoints(fetcher), - fetcher.get_people('project.authors'), - fetcher.get_people('project.maintainers'), - fetcher.get_dict('project.urls'), - fetcher.get_list('project.classifiers'), - fetcher.get_list('project.keywords'), - fetcher.get_dict('project.scripts'), - fetcher.get_dict('project.gui-scripts'), - dynamic, - metadata_version, + project_description_raw = project.get("description") + description = ( + pyproject.ensure_str(project_description_raw, "project.description") + if project_description_raw is not None + else None ) - def _update_dynamic(self, value: Any) -> None: - if value and 'version' in self.dynamic: - self.dynamic.remove('version') + requires_python_raw = project.get("requires-python") + requires_python = None + if requires_python_raw is not None: + requires_python_string = pyproject.ensure_str( + requires_python_raw, "project.requires-python" + ) + if requires_python_string is not None: + try: + requires_python = packaging.specifiers.SpecifierSet( + requires_python_string + ) + except packaging.specifiers.InvalidSpecifier: + msg = "Invalid {key} value, expecting a valid specifier set" + pyproject.config_error( + msg, key="project.requires-python", got=requires_python_string + ) + + self = None + with pyproject.collect(): + self = cls( + name=name, + version=version, + description=description, + license=pyproject.get_license(project, project_dir), + license_files=pyproject.get_license_files(project, project_dir), + readme=pyproject.get_readme(project, project_dir), + requires_python=requires_python, + dependencies=pyproject.get_dependencies(project), + optional_dependencies=pyproject.get_optional_dependencies(project), + entrypoints=pyproject.get_entrypoints(project), + authors=pyproject.ensure_people( + project.get("authors", []), "project.authors" + ), + maintainers=pyproject.ensure_people( + project.get("maintainers", []), "project.maintainers" + ), + urls=pyproject.ensure_dict(project.get("urls", {}), "project.urls") + or {}, + classifiers=pyproject.ensure_list( + project.get("classifiers", []), "project.classifiers" + ) + or [], + keywords=pyproject.ensure_list( + project.get("keywords", []), "project.keywords" + ) + or [], + scripts=pyproject.ensure_dict( + project.get("scripts", {}), "project.scripts" + ) + or {}, + gui_scripts=pyproject.ensure_dict( + project.get("gui-scripts", {}), "project.gui-scripts" + ) + or {}, + dynamic=dynamic, + dynamic_metadata=dynamic_metadata or [], + metadata_version=metadata_version, + all_errors=all_errors, + ) - def __setattr__(self, name: str, value: Any) -> None: - # update dynamic when version is set - if name == 'version' and hasattr(self, 'dynamic'): - self._update_dynamic(value) - super().__setattr__(name, value) + pyproject.finalize("Failed to parse pyproject.toml") + assert self is not None + return self def as_rfc822(self) -> RFC822Message: + """ + Return an RFC822 message with the metadata. 
+ """ message = RFC822Message() - self.write_to_rfc822(message) + smart_message = _SmartMessageSetter(message) + self._write_metadata(smart_message) + return message + + def as_json(self) -> dict[str, str | list[str]]: + """ + Return a JSON message with the metadata. + """ + message: dict[str, str | list[str]] = {} + smart_message = _JSonMessageSetter(message) + self._write_metadata(smart_message) return message - def write_to_rfc822(self, message: RFC822Message) -> None: # noqa: C901 - message['Metadata-Version'] = self.metadata_version - message['Name'] = self.name + def validate(self, *, warn: bool = True) -> None: # noqa: C901 + """ + Validate metadata for consistency and correctness. Will also produce + warnings if ``warn`` is given. Respects ``all_errors``. This is called + when loading a pyproject.toml, and when making metadata. Checks: + + - ``metadata_version`` is a known version or None + - ``name`` is a valid project name + - ``license_files`` can't be used with classic ``license`` + - License classifiers can't be used with SPDX license + - ``description`` is a single line (warning) + - ``license`` is not an SPDX license expression if metadata_version >= 2.4 (warning) + - License classifiers deprecated for metadata_version >= 2.4 (warning) + - ``license`` is an SPDX license expression if metadata_version >= 2.4 + - ``license_files`` is supported only for metadata_version >= 2.4 + - ``project_url`` can't contain keys over 32 characters + """ + errors = ErrorCollector(collect_errors=self.all_errors) + + if self.auto_metadata_version not in constants.KNOWN_METADATA_VERSIONS: + msg = "The metadata_version must be one of {versions} or None (default)" + errors.config_error(msg, versions=constants.KNOWN_METADATA_VERSIONS) + + # See https://packaging.python.org/en/latest/specifications/core-metadata/#name and + # https://packaging.python.org/en/latest/specifications/name-normalization/#name-format + if not re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", self.name, re.IGNORECASE + ): + msg = ( + "Invalid project name {name!r}. A valid name consists only of ASCII letters and " + "numbers, period, underscore and hyphen. It must start and end with a letter or number" + ) + errors.config_error(msg, key="project.name", name=self.name) + + if self.license_files is not None and isinstance(self.license, License): + msg = '{key} must not be used when "project.license" is not a SPDX license expression' + errors.config_error(msg, key="project.license-files") + + if isinstance(self.license, str) and any( + c.startswith("License ::") for c in self.classifiers + ): + msg = "Setting {key} to an SPDX license expression is not compatible with 'License ::' classifiers" + errors.config_error(msg, key="project.license") + + if warn: + if self.description and "\n" in self.description: + warnings.warn( + 'The one-line summary "project.description" should not contain more than one line. 
Readers might merge or truncate newlines.', + ConfigurationWarning, + stacklevel=2, + ) + if self.auto_metadata_version not in constants.PRE_SPDX_METADATA_VERSIONS: + if isinstance(self.license, License): + warnings.warn( + 'Set "project.license" to an SPDX license expression for metadata >= 2.4', + ConfigurationWarning, + stacklevel=2, + ) + elif any(c.startswith("License ::") for c in self.classifiers): + warnings.warn( + "'License ::' classifiers are deprecated for metadata >= 2.4, use a SPDX license expression for \"project.license\" instead", + ConfigurationWarning, + stacklevel=2, + ) + + if ( + isinstance(self.license, str) + and self.auto_metadata_version in constants.PRE_SPDX_METADATA_VERSIONS + ): + msg = "Setting {key} to an SPDX license expression is supported only when emitting metadata version >= 2.4" + errors.config_error(msg, key="project.license") + + if ( + self.license_files is not None + and self.auto_metadata_version in constants.PRE_SPDX_METADATA_VERSIONS + ): + msg = "{key} is supported only when emitting metadata version >= 2.4" + errors.config_error(msg, key="project.license-files") + + for name in self.urls: + if len(name) > 32: + msg = "{key} names cannot be more than 32 characters long" + errors.config_error(msg, key="project.urls", got=name) + + errors.finalize("Metadata validation failed") + + def _write_metadata( # noqa: C901 + self, smart_message: _SmartMessageSetter | _JSonMessageSetter + ) -> None: + """ + Write the metadata to the message. Handles JSON or Message. + """ + self.validate(warn=False) + + smart_message["Metadata-Version"] = self.auto_metadata_version + smart_message["Name"] = self.name if not self.version: - msg = 'Missing version field' + msg = "Missing version field" raise ConfigurationError(msg) - message['Version'] = str(self.version) + smart_message["Version"] = str(self.version) # skip 'Platform' # skip 'Supported-Platform' if self.description: - message['Summary'] = self.description - message['Keywords'] = ','.join(self.keywords) - if 'homepage' in self.urls: - message['Home-page'] = self.urls['homepage'] + smart_message["Summary"] = self.description + smart_message["Keywords"] = ",".join(self.keywords) or None + # skip 'Home-page' # skip 'Download-URL' - message['Author'] = self._name_list(self.authors) - message['Author-Email'] = self._email_list(self.authors) - message['Maintainer'] = self._name_list(self.maintainers) - message['Maintainer-Email'] = self._email_list(self.maintainers) - if self.license: - message['License'] = self.license.text + smart_message["Author"] = _name_list(self.authors) + smart_message["Author-Email"] = _email_list(self.authors) + smart_message["Maintainer"] = _name_list(self.maintainers) + smart_message["Maintainer-Email"] = _email_list(self.maintainers) + + if isinstance(self.license, License): + smart_message["License"] = self.license.text + elif isinstance(self.license, str): + smart_message["License-Expression"] = self.license + + if self.license_files is not None: + for license_file in sorted(set(self.license_files)): + smart_message["License-File"] = os.fspath(license_file.as_posix()) + elif ( + self.auto_metadata_version not in constants.PRE_SPDX_METADATA_VERSIONS + and isinstance(self.license, License) + and self.license.file + ): + smart_message["License-File"] = os.fspath(self.license.file.as_posix()) + for classifier in self.classifiers: - message['Classifier'] = classifier + smart_message["Classifier"] = classifier # skip 'Provides-Dist' # skip 'Obsoletes-Dist' # skip 'Requires-External' for 
name, url in self.urls.items(): - message['Project-URL'] = f'{name.capitalize()}, {url}' + smart_message["Project-URL"] = f"{name}, {url}" if self.requires_python: - message['Requires-Python'] = str(self.requires_python) + smart_message["Requires-Python"] = str(self.requires_python) for dep in self.dependencies: - message['Requires-Dist'] = str(dep) + smart_message["Requires-Dist"] = str(dep) for extra, requirements in self.optional_dependencies.items(): - norm_extra = extra.replace('.', '-').replace('_', '-').lower() - message['Provides-Extra'] = norm_extra + norm_extra = extra.replace(".", "-").replace("_", "-").lower() + smart_message["Provides-Extra"] = norm_extra for requirement in requirements: - message['Requires-Dist'] = str(self._build_extra_req(norm_extra, requirement)) + smart_message["Requires-Dist"] = str( + _build_extra_req(norm_extra, requirement) + ) if self.readme: if self.readme.content_type: - message['Description-Content-Type'] = self.readme.content_type - message.body = self.readme.text + smart_message["Description-Content-Type"] = self.readme.content_type + smart_message.set_payload(self.readme.text) # Core Metadata 2.2 - if self.metadata_version != '2.1': - for field in self.dynamic: - if field in ('name', 'version'): - msg = f'Field cannot be dynamic: {field}' + if self.auto_metadata_version != "2.1": + for field in self.dynamic_metadata: + if field.lower() in {"name", "version", "dynamic"}: + msg = f"Field cannot be set as dynamic metadata: {field}" + raise ConfigurationError(msg) + if field.lower() not in constants.KNOWN_METADATA_FIELDS: + msg = f"Field is not known: {field}" raise ConfigurationError(msg) - message['Dynamic'] = field + smart_message["Dynamic"] = field - def _name_list(self, people: list[tuple[str, str]]) -> str: - return ', '.join( - name - for name, email_ in people - if not email_ - ) - def _email_list(self, people: list[tuple[str, str]]) -> str: - return ', '.join( - email.utils.formataddr((name, _email)) - for name, _email in people - if _email - ) +def _name_list(people: list[tuple[str, str | None]]) -> str | None: + """ + Build a comma-separated list of names. 
+ """ + return ", ".join(name for name, email_ in people if not email_) or None - def _build_extra_req( - self, - extra: str, - requirement: Requirement, - ) -> Requirement: - # append or add our extra marker - requirement = copy.copy(requirement) - if requirement.marker: - if 'or' in requirement.marker._markers: - requirement.marker = packaging.markers.Marker( - f'({requirement.marker}) and extra == "{extra}"' - ) - else: - requirement.marker = packaging.markers.Marker( - f'{requirement.marker} and extra == "{extra}"' - ) - else: - requirement.marker = packaging.markers.Marker(f'extra == "{extra}"') - return requirement - - @staticmethod - def _get_license(fetcher: DataFetcher, project_dir: pathlib.Path) -> License | None: - if 'project.license' not in fetcher: - return None - - _license = fetcher.get_dict('project.license') - for field in _license: - if field not in ('file', 'text'): - msg = f'Unexpected field "project.license.{field}"' - raise ConfigurationError(msg, key=f'project.license.{field}') - - file: pathlib.Path | None = None - filename = fetcher.get_str('project.license.file') - text = fetcher.get_str('project.license.text') - - if (filename and text) or (not filename and not text): - msg = f'Invalid "project.license" value, expecting either "file" or "text" (got "{_license}")' - raise ConfigurationError(msg, key='project.license') - - if filename: - file = project_dir.joinpath(filename) - if not file.is_file(): - msg = f'License file not found ("{filename}")' - raise ConfigurationError(msg, key='project.license.file') - text = file.read_text(encoding='utf-8') - - assert text is not None - return License(text, file) - - @staticmethod - def _get_readme(fetcher: DataFetcher, project_dir: pathlib.Path) -> Readme | None: # noqa: C901 - if 'project.readme' not in fetcher: - return None - - filename: str | None - file: pathlib.Path | None = None - text: str | None - content_type: str | None - - readme = fetcher.get('project.readme') - if isinstance(readme, str): - # readme is a file - text = None - filename = readme - if filename.endswith('.md'): - content_type = 'text/markdown' - elif filename.endswith('.rst'): - content_type = 'text/x-rst' - else: - msg = f'Could not infer content type for readme file "{filename}"' - raise ConfigurationError(msg, key='project.readme') - elif isinstance(readme, dict): - # readme is a dict containing either 'file' or 'text', and content-type - for field in readme: - if field not in ('content-type', 'file', 'text'): - msg = f'Unexpected field "project.readme.{field}"' - raise ConfigurationError(msg, key=f'project.readme.{field}') - content_type = fetcher.get_str('project.readme.content-type') - filename = fetcher.get_str('project.readme.file') - text = fetcher.get_str('project.readme.text') - if (filename and text) or (not filename and not text): - msg = f'Invalid "project.readme" value, expecting either "file" or "text" (got "{readme}")' - raise ConfigurationError(msg, key='project.readme') - if not content_type: - msg = 'Field "project.readme.content-type" missing' - raise ConfigurationError(msg, key='project.readme.content-type') - else: - msg = ( - f'Field "project.readme" has an invalid type, expecting either, ' - f'a string or dictionary of strings (got "{readme}")' - ) - raise ConfigurationError(msg, key='project.readme') - - if filename: - file = project_dir.joinpath(filename) - if not file.is_file(): - msg = f'Readme file not found ("{filename}")' - raise ConfigurationError(msg, key='project.readme.file') - text = 
file.read_text(encoding='utf-8') - - assert text is not None - return Readme(text, file, content_type) - - @staticmethod - def _get_dependencies(fetcher: DataFetcher) -> list[Requirement]: - try: - requirement_strings = fetcher.get_list('project.dependencies') - except KeyError: - return [] - - requirements: list[Requirement] = [] - for req in requirement_strings: - try: - requirements.append(packaging.requirements.Requirement(req)) - except packaging.requirements.InvalidRequirement as e: - msg = ( - 'Field "project.dependencies" contains an invalid PEP 508 ' - f'requirement string "{req}" ("{e}")' - ) - raise ConfigurationError(msg) from None - return requirements - - @staticmethod - def _get_optional_dependencies(fetcher: DataFetcher) -> dict[str, list[Requirement]]: - try: - val = fetcher.get('project.optional-dependencies') - except KeyError: - return {} - - requirements_dict: dict[str, list[Requirement]] = {} - if not isinstance(val, dict): - msg = ( - 'Field "project.optional-dependencies" has an invalid type, expecting a ' - f'dictionary of PEP 508 requirement strings (got "{val}")' + +def _email_list(people: list[tuple[str, str | None]]) -> str | None: + """ + Build a comma-separated list of emails. + """ + return ( + ", ".join( + email.utils.formataddr((name, _email)) for name, _email in people if _email + ) + or None + ) + + +def _build_extra_req( + extra: str, + requirement: Requirement, +) -> Requirement: + """ + Build a new requirement with an extra marker. + """ + requirement = copy.copy(requirement) + if requirement.marker: + if "or" in requirement.marker._markers: + requirement.marker = packaging.markers.Marker( + f"({requirement.marker}) and extra == {extra!r}" ) - raise ConfigurationError(msg) - for extra, requirements in val.copy().items(): - assert isinstance(extra, str) - if not isinstance(requirements, list): - msg = ( - f'Field "project.optional-dependencies.{extra}" has an invalid type, expecting a ' - f'dictionary PEP 508 requirement strings (got "{requirements}")' - ) - raise ConfigurationError(msg) - requirements_dict[extra] = [] - for req in requirements: - if not isinstance(req, str): - msg = ( - f'Field "project.optional-dependencies.{extra}" has an invalid type, ' - f'expecting a PEP 508 requirement string (got "{req}")' - ) - raise ConfigurationError(msg) - try: - requirements_dict[extra].append(packaging.requirements.Requirement(req)) - except packaging.requirements.InvalidRequirement as e: - msg = ( - f'Field "project.optional-dependencies.{extra}" contains ' - f'an invalid PEP 508 requirement string "{req}" ("{e}")' - ) - raise ConfigurationError(msg) from None - return dict(requirements_dict) - - @staticmethod - def _get_entrypoints(fetcher: DataFetcher) -> dict[str, dict[str, str]]: - try: - val = fetcher.get('project.entry-points') - except KeyError: - return {} - if not isinstance(val, dict): - msg = ( - 'Field "project.entry-points" has an invalid type, expecting a ' - f'dictionary of entrypoint sections (got "{val}")' + else: + requirement.marker = packaging.markers.Marker( + f"{requirement.marker} and extra == {extra!r}" ) - raise ConfigurationError(msg) - for section, entrypoints in val.items(): - assert isinstance(section, str) - if not isinstance(entrypoints, dict): - msg = ( - f'Field "project.entry-points.{section}" has an invalid type, expecting a ' - f'dictionary of entrypoints (got "{entrypoints}")' - ) - raise ConfigurationError(msg) - for name, entrypoint in entrypoints.items(): - assert isinstance(name, str) - if not 
isinstance(entrypoint, str): - msg = ( - f'Field "project.entry-points.{section}.{name}" has an invalid type, ' - f'expecting a string (got "{entrypoint}")' - ) - raise ConfigurationError(msg) - return val + else: + requirement.marker = packaging.markers.Marker(f"extra == {extra!r}") + return requirement diff --git a/src/scikit_build_core/_vendor/pyproject_metadata/constants.py b/src/scikit_build_core/_vendor/pyproject_metadata/constants.py new file mode 100644 index 00000000..afe42818 --- /dev/null +++ b/src/scikit_build_core/_vendor/pyproject_metadata/constants.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: MIT + +""" +Constants for the pyproject_metadata package, collected here to make them easy +to update. These should be considered mostly private. +""" + +from __future__ import annotations + +__all__ = [ + "KNOWN_BUILD_SYSTEM_FIELDS", + "KNOWN_METADATA_FIELDS", + "KNOWN_METADATA_VERSIONS", + "KNOWN_METADATA_VERSIONS", + "KNOWN_MULTIUSE", + "KNOWN_PROJECT_FIELDS", + "KNOWN_TOPLEVEL_FIELDS", + "PRE_SPDX_METADATA_VERSIONS", + "PROJECT_TO_METADATA", +] + + +def __dir__() -> list[str]: + return __all__ + + +KNOWN_METADATA_VERSIONS = {"2.1", "2.2", "2.3", "2.4"} +PRE_SPDX_METADATA_VERSIONS = {"2.1", "2.2", "2.3"} + +PROJECT_TO_METADATA = { + "authors": frozenset(["Author", "Author-Email"]), + "classifiers": frozenset(["Classifier"]), + "dependencies": frozenset(["Requires-Dist"]), + "description": frozenset(["Summary"]), + "dynamic": frozenset(), + "entry-points": frozenset(), + "gui-scripts": frozenset(), + "keywords": frozenset(["Keywords"]), + "license": frozenset(["License", "License-Expression"]), + "license-files": frozenset(["License-File"]), + "maintainers": frozenset(["Maintainer", "Maintainer-Email"]), + "name": frozenset(["Name"]), + "optional-dependencies": frozenset(["Provides-Extra", "Requires-Dist"]), + "readme": frozenset(["Description", "Description-Content-Type"]), + "requires-python": frozenset(["Requires-Python"]), + "scripts": frozenset(), + "urls": frozenset(["Project-URL"]), + "version": frozenset(["Version"]), +} + +KNOWN_TOPLEVEL_FIELDS = {"build-system", "project", "tool", "dependency-groups"} +KNOWN_BUILD_SYSTEM_FIELDS = {"backend-path", "build-backend", "requires"} +KNOWN_PROJECT_FIELDS = set(PROJECT_TO_METADATA) + +KNOWN_METADATA_FIELDS = { + "author", + "author-email", + "classifier", + "description", + "description-content-type", + "download-url", # Not specified via pyproject standards, deprecated by PEP 753 + "dynamic", # Can't be in dynamic + "home-page", # Not specified via pyproject standards, deprecated by PEP 753 + "keywords", + "license", + "license-expression", + "license-file", + "maintainer", + "maintainer-email", + "metadata-version", + "name", # Can't be in dynamic + "obsoletes", # Deprecated + "obsoletes-dist", # Rarely used + "platform", # Not specified via pyproject standards + "project-url", + "provides", # Deprecated + "provides-dist", # Rarely used + "provides-extra", + "requires", # Deprecated + "requires-dist", + "requires-external", # Not specified via pyproject standards + "requires-python", + "summary", + "supported-platform", # Not specified via pyproject standards + "version", # Can't be in dynamic +} + +KNOWN_MULTIUSE = { + "dynamic", + "platform", + "provides-extra", + "supported-platform", + "license-file", + "classifier", + "requires-dist", + "requires-external", + "project-url", + "provides-dist", + "obsoletes-dist", + "requires", # Deprecated + "obsoletes", # Deprecated + "provides", # Deprecated +} diff --git 
a/src/scikit_build_core/_vendor/pyproject_metadata/errors.py b/src/scikit_build_core/_vendor/pyproject_metadata/errors.py new file mode 100644 index 00000000..4fe90c2d --- /dev/null +++ b/src/scikit_build_core/_vendor/pyproject_metadata/errors.py @@ -0,0 +1,119 @@ +# SPDX-License-Identifier: MIT + +""" +This module defines exceptions and error handling utilities. It is the +recommended path to access ``ConfigurationError``, ``ConfigurationWarning``, and +``ExceptionGroup``. For backward compatibility, ``ConfigurationError`` is +re-exported in the top-level package. +""" + +from __future__ import annotations + +import builtins +import contextlib +import dataclasses +import sys +import typing +import warnings + +__all__ = [ + "ConfigurationError", + "ConfigurationWarning", + "ExceptionGroup", +] + + +def __dir__() -> list[str]: + return __all__ + + +class ConfigurationError(Exception): + """Error in the backend metadata. Has an optional key attribute, which will be non-None + if the error is related to a single key in the pyproject.toml file.""" + + def __init__(self, msg: str, *, key: str | None = None): + super().__init__(msg) + self._key = key + + @property + def key(self) -> str | None: # pragma: no cover + return self._key + + +class ConfigurationWarning(UserWarning): + """Warnings about backend metadata.""" + + +if sys.version_info >= (3, 11): + ExceptionGroup = builtins.ExceptionGroup +else: + + class ExceptionGroup(Exception): + """A minimal implementation of `ExceptionGroup` from Python 3.11. + + Users can replace this with a more complete implementation, such as from + the exceptiongroup backport package, if better error messages and + integration with tooling is desired and the addition of a dependency is + acceptable. + """ + + message: str + exceptions: list[Exception] + + def __init__(self, message: str, exceptions: list[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + + +@dataclasses.dataclass +class ErrorCollector: + """ + Collect errors and raise them as a group at the end (if collect_errors is True), + otherwise raise them immediately.
+ """ + + collect_errors: bool + errors: list[Exception] = dataclasses.field(default_factory=list) + + def config_error( + self, + msg: str, + *, + key: str | None = None, + got: typing.Any = None, + got_type: type[typing.Any] | None = None, + warn: bool = False, + **kwargs: typing.Any, + ) -> None: + """Raise a configuration error, or add it to the error list.""" + msg = msg.format(key=f'"{key}"', **kwargs) + if got is not None: + msg = f"{msg} (got {got!r})" + if got_type is not None: + msg = f"{msg} (got {got_type.__name__})" + + if warn: + warnings.warn(msg, ConfigurationWarning, stacklevel=3) + elif self.collect_errors: + self.errors.append(ConfigurationError(msg, key=key)) + else: + raise ConfigurationError(msg, key=key) + + def finalize(self, msg: str) -> None: + """Raise a group exception if there are any errors.""" + if self.errors: + raise ExceptionGroup(msg, self.errors) + + @contextlib.contextmanager + def collect(self) -> typing.Generator[None, None, None]: + """Support nesting; add any grouped errors to the error list.""" + if self.collect_errors: + try: + yield + except ExceptionGroup as error: + self.errors.extend(error.exceptions) + else: + yield diff --git a/src/scikit_build_core/_vendor/pyproject_metadata/project_table.py b/src/scikit_build_core/_vendor/pyproject_metadata/project_table.py new file mode 100644 index 00000000..a92d66d4 --- /dev/null +++ b/src/scikit_build_core/_vendor/pyproject_metadata/project_table.py @@ -0,0 +1,152 @@ +# SPDX-License-Identifier: MIT + +""" +This module contains type definitions for the tables used in the +``pyproject.toml``. You should either import this at type-check time only, or +make sure ``typing_extensions`` is available for Python 3.10 and below. + +Documentation notice: the fields with hyphens are not shown due to a sphinx-autodoc bug. 
+""" + +from __future__ import annotations + +import sys +import typing +from typing import Any, Dict, List, Union + +if sys.version_info < (3, 11): + from typing_extensions import Required +else: + from typing import Required + +if sys.version_info < (3, 8): + from typing_extensions import Literal, TypedDict +else: + from typing import Literal, TypedDict + + +__all__ = [ + "BuildSystemTable", + "ContactTable", + "Dynamic", + "IncludeGroupTable", + "LicenseTable", + "ProjectTable", + "PyProjectTable", + "ReadmeTable", +] + + +def __dir__() -> list[str]: + return __all__ + + +class ContactTable(TypedDict, total=False): + name: str + email: str + + +class LicenseTable(TypedDict, total=False): + text: str + file: str + + +ReadmeTable = TypedDict( + "ReadmeTable", {"file": str, "text": str, "content-type": str}, total=False +) + +Dynamic = Literal[ + "authors", + "classifiers", + "dependencies", + "description", + "dynamic", + "entry-points", + "gui-scripts", + "keywords", + "license", + "maintainers", + "optional-dependencies", + "readme", + "requires-python", + "scripts", + "urls", + "version", +] + +ProjectTable = TypedDict( + "ProjectTable", + { + "name": Required[str], + "version": str, + "description": str, + "license": Union[LicenseTable, str], + "license-files": List[str], + "readme": Union[str, ReadmeTable], + "requires-python": str, + "dependencies": List[str], + "optional-dependencies": Dict[str, List[str]], + "entry-points": Dict[str, Dict[str, str]], + "authors": List[ContactTable], + "maintainers": List[ContactTable], + "urls": Dict[str, str], + "classifiers": List[str], + "keywords": List[str], + "scripts": Dict[str, str], + "gui-scripts": Dict[str, str], + "dynamic": List[Dynamic], + }, + total=False, +) + +BuildSystemTable = TypedDict( + "BuildSystemTable", + { + "build-backend": str, + "requires": List[str], + "backend-path": List[str], + }, + total=False, +) + +# total=False here because this could be +# extended in the future +IncludeGroupTable = TypedDict( + "IncludeGroupTable", + {"include-group": str}, + total=False, +) + +PyProjectTable = TypedDict( + "PyProjectTable", + { + "build-system": BuildSystemTable, + "project": ProjectTable, + "tool": Dict[str, Any], + "dependency-groups": Dict[str, List[Union[str, IncludeGroupTable]]], + }, + total=False, +) + +# Tests for type checking +if typing.TYPE_CHECKING: + PyProjectTable( + { + "build-system": BuildSystemTable( + {"build-backend": "one", "requires": ["two"]} + ), + "project": ProjectTable( + { + "name": "one", + "version": "0.1.0", + } + ), + "tool": {"thing": object()}, + "dependency-groups": { + "one": [ + "one", + IncludeGroupTable({"include-group": "two"}), + ] + }, + } + ) diff --git a/src/scikit_build_core/_vendor/pyproject_metadata/py.typed b/src/scikit_build_core/_vendor/pyproject_metadata/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/scikit_build_core/_vendor/pyproject_metadata/pyproject.py b/src/scikit_build_core/_vendor/pyproject_metadata/pyproject.py new file mode 100644 index 00000000..d1822e14 --- /dev/null +++ b/src/scikit_build_core/_vendor/pyproject_metadata/pyproject.py @@ -0,0 +1,450 @@ +# SPDX-License-Identifier: MIT + +""" +This module focues on reading pyproject.toml fields with error collection. It is +mostly internal, except for License and Readme classes, which are re-exported in +the top-level package. 
+""" + +from __future__ import annotations + +import dataclasses +import pathlib +import re +import typing + +import packaging.requirements + +from .errors import ErrorCollector + +if typing.TYPE_CHECKING: + from collections.abc import Generator, Iterable, Sequence + + from packaging.requirements import Requirement + + from .project_table import ContactTable, Dynamic, ProjectTable + + +__all__ = [ + "License", + "Readme", +] + + +def __dir__() -> list[str]: + return __all__ + + +@dataclasses.dataclass(frozen=True) +class License: + """ + This represents a classic license, which contains text, and optionally a + file path. Modern licenses are just SPDX identifiers, which are strings. + """ + + text: str + file: pathlib.Path | None + + +@dataclasses.dataclass(frozen=True) +class Readme: + """ + This represents a readme, which contains text and a content type, and + optionally a file path. + """ + + text: str + file: pathlib.Path | None + content_type: str + + +T = typing.TypeVar("T") + + +@dataclasses.dataclass +class PyProjectReader(ErrorCollector): + """Class for reading pyproject.toml fields with error collection. + + Unrelated errors are collected and raised at once if the `collect_errors` + parameter is set to `True`. Some methods will return None if an error was + raised. Most of them expect a non-None value as input to enforce the caller + to handle missing vs. error correctly. The exact design is based on usage, + as this is an internal class. + """ + + def ensure_str(self, value: str, key: str) -> str | None: + """Ensure that a value is a string.""" + if isinstance(value, str): + return value + + msg = "Field {key} has an invalid type, expecting a string" + self.config_error(msg, key=key, got_type=type(value)) + return None + + def ensure_list(self, val: list[T], key: str) -> list[T] | None: + """Ensure that a value is a list of strings.""" + if not isinstance(val, list): + msg = "Field {key} has an invalid type, expecting a list of strings" + self.config_error(msg, key=key, got_type=type(val)) + return None + for item in val: + if not isinstance(item, str): + msg = "Field {key} contains item with invalid type, expecting a string" + self.config_error(msg, key=key, got_type=type(item)) + return None + + return val + + def ensure_dict(self, val: dict[str, str], key: str) -> dict[str, str] | None: + """Ensure that a value is a dictionary of strings.""" + if not isinstance(val, dict): + msg = "Field {key} has an invalid type, expecting a table of strings" + self.config_error(msg, key=key, got_type=type(val)) + return None + for subkey, item in val.items(): + if not isinstance(item, str): + msg = "Field {key} has an invalid type, expecting a string" + self.config_error(msg, key=f"{key}.{subkey}", got_type=type(item)) + return None + return val + + def ensure_people( + self, val: Sequence[ContactTable], key: str + ) -> list[tuple[str, str | None]]: + """Ensure that a value is a list of tables with optional "name" and "email" keys.""" + if not isinstance(val, list): + msg = ( + "Field {key} has an invalid type, expecting a list of " + 'tables containing the "name" and/or "email" keys' + ) + self.config_error(msg, key=key, got_type=type(val)) + return [] + for each in val: + if not isinstance(each, dict): + msg = ( + "Field {key} has an invalid type, expecting a list of " + 'tables containing the "name" and/or "email" keys' + " (got list with {type_name})" + ) + self.config_error(msg, key=key, type_name=type(each).__name__) + return [] + for value in each.values(): + if not 
isinstance(value, str): + msg = ( + "Field {key} has an invalid type, expecting a list of " + 'tables containing the "name" and/or "email" keys' + " (got list with dict with {type_name})" + ) + self.config_error(msg, key=key, type_name=type(value).__name__) + return [] + extra_keys = set(each) - {"name", "email"} + if extra_keys: + msg = ( + "Field {key} has an invalid type, expecting a list of " + 'tables containing the "name" and/or "email" keys' + " (got list with dict with extra keys {extra_keys})" + ) + self.config_error( + msg, + key=key, + extra_keys=", ".join(sorted(f'"{k}"' for k in extra_keys)), + ) + return [] + return [(entry.get("name", "Unknown"), entry.get("email")) for entry in val] + + def get_license( + self, project: ProjectTable, project_dir: pathlib.Path + ) -> License | str | None: + """Get the license field from the project table. Handles PEP 639 style license too. + + None is returned if the license field is not present or if an error occurred. + """ + val = project.get("license") + if val is None: + return None + if isinstance(val, str): + return val + + if isinstance(val, dict): + _license = self.ensure_dict(val, "project.license") # type: ignore[arg-type] + if _license is None: + return None + else: + msg = "Field {key} has an invalid type, expecting a string or table of strings" + self.config_error(msg, key="project.license", got_type=type(val)) + return None + + for field in _license: + if field not in ("file", "text"): + msg = "Unexpected field {key}" + self.config_error(msg, key=f"project.license.{field}") + return None + + file: pathlib.Path | None = None + filename = _license.get("file") + text = _license.get("text") + + if (filename and text) or (not filename and not text): + msg = ( + 'Invalid {key} contents, expecting a string or one key "file" or "text"' + ) + self.config_error(msg, key="project.license", got=_license) + return None + + if filename: + file = project_dir.joinpath(filename) + if not file.is_file(): + msg = f"License file not found ({filename!r})" + self.config_error(msg, key="project.license.file") + return None + text = file.read_text(encoding="utf-8") + + assert text is not None + return License(text, file) + + def get_license_files( + self, project: ProjectTable, project_dir: pathlib.Path + ) -> list[pathlib.Path] | None: + """Get the license-files list of files from the project table. + + Returns None if an error occurred (including invalid globs, etc) or if + not present. + """ + license_files = project.get("license-files") + if license_files is None: + return None + if self.ensure_list(license_files, "project.license-files") is None: + return None + + return list(self._get_files_from_globs(project_dir, license_files)) + + def get_readme( # noqa: C901 + self, project: ProjectTable, project_dir: pathlib.Path + ) -> Readme | None: + """Get the text of the readme from the project table. + + Returns None if an error occurred or if the readme field is not present. 
+ """ + if "readme" not in project: + return None + + filename: str | None = None + file: pathlib.Path | None = None + text: str | None = None + content_type: str | None = None + + readme = project["readme"] + if isinstance(readme, str): + # readme is a file + text = None + filename = readme + if filename.endswith(".md"): + content_type = "text/markdown" + elif filename.endswith(".rst"): + content_type = "text/x-rst" + else: + msg = "Could not infer content type for readme file {filename!r}" + self.config_error(msg, key="project.readme", filename=filename) + return None + elif isinstance(readme, dict): + # readme is a dict containing either 'file' or 'text', and content-type + for field in readme: + if field not in ("content-type", "file", "text"): + msg = "Unexpected field {key}" + self.config_error(msg, key=f"project.readme.{field}") + return None + + content_type_raw = readme.get("content-type") + if content_type_raw is not None: + content_type = self.ensure_str( + content_type_raw, "project.readme.content-type" + ) + if content_type is None: + return None + filename_raw = readme.get("file") + if filename_raw is not None: + filename = self.ensure_str(filename_raw, "project.readme.file") + if filename is None: + return None + + text_raw = readme.get("text") + if text_raw is not None: + text = self.ensure_str(text_raw, "project.readme.text") + if text is None: + return None + + if (filename and text) or (not filename and not text): + msg = 'Invalid {key} contents, expecting either "file" or "text"' + self.config_error(msg, key="project.readme", got=readme) + return None + if not content_type: + msg = "Field {key} missing" + self.config_error(msg, key="project.readme.content-type") + return None + else: + msg = "Field {key} has an invalid type, expecting either a string or table of strings" + self.config_error(msg, key="project.readme", got_type=type(readme)) + return None + + if filename: + file = project_dir.joinpath(filename) + if not file.is_file(): + msg = "Readme file not found ({filename!r})" + self.config_error(msg, key="project.readme.file", filename=filename) + return None + text = file.read_text(encoding="utf-8") + + assert text is not None + return Readme(text, file, content_type) + + def get_dependencies(self, project: ProjectTable) -> list[Requirement]: + """Get the dependencies from the project table.""" + + requirement_strings: list[str] | None = None + requirement_strings_raw = project.get("dependencies") + if requirement_strings_raw is not None: + requirement_strings = self.ensure_list( + requirement_strings_raw, "project.dependencies" + ) + if requirement_strings is None: + return [] + + requirements: list[Requirement] = [] + for req in requirement_strings: + try: + requirements.append(packaging.requirements.Requirement(req)) + except packaging.requirements.InvalidRequirement as e: + msg = "Field {key} contains an invalid PEP 508 requirement string {req!r} ({error!r})" + self.config_error(msg, key="project.dependencies", req=req, error=e) + return [] + return requirements + + def get_optional_dependencies( + self, + project: ProjectTable, + ) -> dict[str, list[Requirement]]: + """Get the optional dependencies from the project table.""" + + val = project.get("optional-dependencies") + if not val: + return {} + + requirements_dict: dict[str, list[Requirement]] = {} + if not isinstance(val, dict): + msg = "Field {key} has an invalid type, expecting a table of PEP 508 requirement strings" + self.config_error( + msg, key="project.optional-dependencies", got_type=type(val) + ) 
+            return {}
+        for extra, requirements in val.copy().items():
+            assert isinstance(extra, str)
+            if not isinstance(requirements, list):
+                msg = "Field {key} has an invalid type, expecting a table of PEP 508 requirement strings"
+                self.config_error(
+                    msg,
+                    key=f"project.optional-dependencies.{extra}",
+                    got_type=type(requirements),
+                )
+                return {}
+            requirements_dict[extra] = []
+            for req in requirements:
+                if not isinstance(req, str):
+                    msg = "Field {key} has an invalid type, expecting a PEP 508 requirement string"
+                    self.config_error(
+                        msg,
+                        key=f"project.optional-dependencies.{extra}",
+                        got_type=type(req),
+                    )
+                    return {}
+                try:
+                    requirements_dict[extra].append(
+                        packaging.requirements.Requirement(req)
+                    )
+                except packaging.requirements.InvalidRequirement as e:
+                    msg = (
+                        "Field {key} contains "
+                        "an invalid PEP 508 requirement string {req!r} ({error!r})"
+                    )
+                    self.config_error(
+                        msg,
+                        key=f"project.optional-dependencies.{extra}",
+                        req=req,
+                        error=e,
+                    )
+                    return {}
+        return dict(requirements_dict)
+
+    def get_entrypoints(self, project: ProjectTable) -> dict[str, dict[str, str]]:
+        """Get the entrypoints from the project table."""
+
+        val = project.get("entry-points", None)
+        if val is None:
+            return {}
+        if not isinstance(val, dict):
+            msg = "Field {key} has an invalid type, expecting a table of entrypoint sections"
+            self.config_error(msg, key="project.entry-points", got_type=type(val))
+            return {}
+        for section, entrypoints in val.items():
+            assert isinstance(section, str)
+            if not re.match(r"^\w+(\.\w+)*$", section):
+                msg = (
+                    "Field {key} has an invalid value, expecting a name "
+                    "containing only alphanumeric, underscore, or dot characters"
+                )
+                self.config_error(msg, key="project.entry-points", got=section)
+                return {}
+            if not isinstance(entrypoints, dict):
+                msg = (
+                    "Field {key} has an invalid type, expecting a table of entrypoints"
+                )
+                self.config_error(
+                    msg,
+                    key=f"project.entry-points.{section}",
+                    got_type=type(entrypoints),
+                )
+                return {}
+            for name, entrypoint in entrypoints.items():
+                assert isinstance(name, str)
+                if not isinstance(entrypoint, str):
+                    msg = "Field {key} has an invalid type, expecting a string"
+                    self.config_error(
+                        msg,
+                        key=f"project.entry-points.{section}.{name}",
+                        got_type=type(entrypoint),
+                    )
+                    return {}
+        return val
+
+    def get_dynamic(self, project: ProjectTable) -> list[Dynamic]:
+        """Get the dynamic fields from the project table.
+
+        Returns an empty list if the field is not present or if an error occurred.
+ """ + dynamic = project.get("dynamic", []) + + self.ensure_list(dynamic, "project.dynamic") + + if "name" in dynamic: + msg = "Unsupported field 'name' in {key}" + self.config_error(msg, key="project.dynamic") + return [] + + return dynamic + + def _get_files_from_globs( + self, project_dir: pathlib.Path, globs: Iterable[str] + ) -> Generator[pathlib.Path, None, None]: + """Given a list of globs, get files that match.""" + + for glob in globs: + if glob.startswith(("..", "/")): + msg = "{glob!r} is an invalid {key} glob: the pattern must match files within the project directory" + self.config_error(msg, key="project.license-files", glob=glob) + break + files = [f for f in project_dir.glob(glob) if f.is_file()] + if not files: + msg = "Every pattern in {key} must match at least one file: {glob!r} did not match any" + self.config_error(msg, key="project.license-files", glob=glob) + break + for f in files: + yield f.relative_to(project_dir) diff --git a/src/scikit_build_core/build/metadata.py b/src/scikit_build_core/build/metadata.py index fc788bb0..83b390fe 100644 --- a/src/scikit_build_core/build/metadata.py +++ b/src/scikit_build_core/build/metadata.py @@ -1,11 +1,18 @@ from __future__ import annotations import copy +import sys from typing import TYPE_CHECKING, Any from packaging.version import Version -from .._vendor.pyproject_metadata import StandardMetadata +from .._logging import logger +from .._vendor.pyproject_metadata import ( + StandardMetadata, + errors, + extras_build_system, + extras_top_level, +) from ..settings._load_provider import load_dynamic_metadata if TYPE_CHECKING: @@ -20,12 +27,20 @@ def __dir__() -> list[str]: return __all__ +# Use exceptiongroup backport +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + errors.ExceptionGroup = ExceptionGroup # type: ignore[misc, assignment] + + # If pyproject-metadata eventually supports updates, this can be simplified def get_standard_metadata( pyproject_dict: Mapping[str, Any], settings: ScikitBuildSettings, ) -> StandardMetadata: new_pyproject_dict = copy.deepcopy(pyproject_dict) + # Handle any dynamic metadata for field, provider, config in load_dynamic_metadata(settings.metadata): if provider is None: @@ -37,7 +52,29 @@ def get_standard_metadata( new_pyproject_dict["project"][field] = provider.dynamic_metadata(field, config) new_pyproject_dict["project"]["dynamic"].remove(field) - metadata = StandardMetadata.from_pyproject(new_pyproject_dict) + if settings.strict_config: + extra_keys_top = extras_top_level(new_pyproject_dict) + if extra_keys_top: + logger.warning( + f"Unknown keys in top-level of pyproject.toml: {', '.join(extra_keys_top)}" + ) + extra_keys_build = extras_build_system(new_pyproject_dict) + if extra_keys_build: + logger.warning( + f"Unknown keys in build-system of pyproject.toml: {', '.join(extra_keys_build)}" + ) + + extra_validate = ( + settings.minimum_version is None or settings.minimum_version >= Version("0.10") + ) + if extra_validate: + allow_extra_keys: bool | None = not settings.strict_config + else: + allow_extra_keys = None if settings.strict_config else False + + metadata = StandardMetadata.from_pyproject( + new_pyproject_dict, all_errors=True, allow_extra_keys=allow_extra_keys + ) # For scikit-build-core < 0.5, we keep the normalized name for back-compat if settings.minimum_version is not None and settings.minimum_version < Version( @@ -54,4 +91,17 @@ def get_standard_metadata( msg = "Multiple lines in project.description are not supported; this is supposed to be a one 
line summary" raise ValueError(msg) + # Validate license if possible. + if isinstance(metadata.license, str): + try: + import packaging.licenses + + metadata.license = packaging.licenses.canonicalize_license_expression( + metadata.license + ) + except ImportError: + logger.warning( + "Packaging 24.2+ required for license normalization. Please update (Python 3.8+ required)" + ) + return metadata diff --git a/src/scikit_build_core/build/wheel.py b/src/scikit_build_core/build/wheel.py index a389df04..27236e0b 100644 --- a/src/scikit_build_core/build/wheel.py +++ b/src/scikit_build_core/build/wheel.py @@ -235,6 +235,11 @@ def _build_wheel_impl_impl( msg = "project.version is not specified, must be statically present or tool.scikit-build metadata.version.provider configured when dynamic" raise AssertionError(msg) + # Verify PEP 639 replaces license-files + if metadata.license_files is not None and settings.wheel.license_files: + msg = "Both project.license-files and tool.scikit-build.wheel.license-files are set, use only one" + raise AssertionError(msg) + # Get the closest (normally) importable name normalized_name = metadata.name.replace("-", "_").replace(".", "_") @@ -313,16 +318,30 @@ def _build_wheel_impl_impl( install_dir = wheel_dirs[targetlib] / settings.wheel.install_dir # Include the metadata license.file entry if provided - license_file_globs = list(settings.wheel.license_files) - if metadata.license and metadata.license.file: - license_file_globs.append(str(metadata.license.file)) - - for y in license_file_globs: - for x in Path().glob(y): - if x.is_file(): - path = wheel_dirs["metadata"] / "licenses" / x - path.parent.mkdir(parents=True, exist_ok=True) - shutil.copy(x, path) + if metadata.license_files: + license_paths = metadata.license_files + else: + license_file_globs = settings.wheel.license_files or [ + "LICEN[CS]E*", + "COPYING*", + "NOTICE*", + "AUTHORS*", + ] + if ( + metadata.license + and not isinstance(metadata.license, str) + and metadata.license.file + ): + license_file_globs.append(str(metadata.license.file)) + + license_paths = [ + x for y in license_file_globs for x in Path().glob(y) if x.is_file() + ] + + for x in license_paths: + path = wheel_dirs["metadata"] / "licenses" / x + path.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(x, path) if ( settings.wheel.license_files diff --git a/src/scikit_build_core/resources/scikit-build.schema.json b/src/scikit_build_core/resources/scikit-build.schema.json index 57b35482..8d13b712 100644 --- a/src/scikit_build_core/resources/scikit-build.schema.json +++ b/src/scikit_build_core/resources/scikit-build.schema.json @@ -217,7 +217,7 @@ "items": { "type": "string" }, - "description": "A list of license files to include in the wheel. Supports glob patterns." + "description": "A list of license files to include in the wheel. Supports glob patterns. The default is ``[\"LICEN[CS]E*\", \"COPYING*\", \"NOTICE*\", \"AUTHORS*\"]``. Must not be set if ``project.license-files`` is set." }, "cmake": { "type": "boolean", diff --git a/src/scikit_build_core/settings/skbuild_model.py b/src/scikit_build_core/settings/skbuild_model.py index 77ced4be..e32ad38a 100644 --- a/src/scikit_build_core/settings/skbuild_model.py +++ b/src/scikit_build_core/settings/skbuild_model.py @@ -199,11 +199,11 @@ class WheelSettings: root, giving access to "/platlib", "/data", "/headers", and "/scripts". 
""" - license_files: List[str] = dataclasses.field( - default_factory=lambda: ["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"] - ) + license_files: Optional[List[str]] = None """ A list of license files to include in the wheel. Supports glob patterns. + The default is ``["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"]``. + Must not be set if ``project.license-files`` is set. """ cmake: bool = True diff --git a/tests/conftest.py b/tests/conftest.py index b3ee569f..d63ec7fa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -211,10 +211,10 @@ def package_simple_pyproject_ext( ) -> PackageInfo: package = PackageInfo( "simple_pyproject_ext", - "5544d96810ce60ac40baf28cf8caf2e1e7e1fa7439b283d3fb52cdc1f87f12ac", - "aaa15c185dc3fbc301dc2fca937cc935442c38e55bc400fbefd424bd6ce92adb", - "ee3a564a37c42df3abdcee3862175baceeb6f6eff0b29931681b424ec5d96067", - "4c1d402621e7f00fce4ce5afdb73a9ba4cc25cd4bb57619113432841f779dd68", + "72cd37019e113cbabebdceb79bc867f8e1a4fc7323b6a272e6a77d6ea384d3e8", + "be15157c8659f258b73eb474616473d60945c4d45693796b55234ec83893263a", + "d97cd496aa6c46df2caf5064a7765588c831b8db9d7b46a536026ed951ce724a", + "b1182a2aa7a2b81365b3ad7ae1839b20d983ef10b6c3de16b681c23f536ca1b7", ) process_package(package, tmp_path, monkeypatch) return package @@ -334,6 +334,15 @@ def package_simple_purelib_package( return package +@pytest.fixture +def package_pep639_pure(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> PackageInfo: + package = PackageInfo( + "pep639_pure", + ) + process_package(package, tmp_path, monkeypatch) + return package + + def which_mock(name: str) -> str | None: if name in {"ninja", "ninja-build", "cmake3", "samu", "gmake", "make"}: return None diff --git a/tests/packages/pep639_pure/LICENSE1.txt b/tests/packages/pep639_pure/LICENSE1.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/packages/pep639_pure/nested/more/LICENSE2.txt b/tests/packages/pep639_pure/nested/more/LICENSE2.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/packages/pep639_pure/pyproject.toml b/tests/packages/pep639_pure/pyproject.toml new file mode 100644 index 00000000..a3fbf563 --- /dev/null +++ b/tests/packages/pep639_pure/pyproject.toml @@ -0,0 +1,12 @@ +[build-system] +requires = ["scikit-build-core"] +build-backend = "scikit_build_core.build" + +[project] +name = "pep639_pure" +version = "0.1.0" +license = "MIT" +license-files = ["LICENSE1.txt", "nested/more/LICENSE2.txt"] + +[tool.scikit-build] +wheel.cmake = false diff --git a/tests/test_prepare_metadata.py b/tests/test_prepare_metadata.py index daf68e32..b40ea1af 100644 --- a/tests/test_prepare_metadata.py +++ b/tests/test_prepare_metadata.py @@ -77,3 +77,14 @@ def test_multiline_description(): }, settings=ScikitBuildSettings(minimum_version=Version("0.8")), ) + + +def test_license_normalization(): + pytest.importorskip("packaging.licenses") + metadata = get_standard_metadata( + pyproject_dict={ + "project": {"name": "hello", "version": "1.1.1", "license": "ApacHE-2.0"} + }, + settings=ScikitBuildSettings(), + ) + assert metadata.license == "Apache-2.0" diff --git a/tests/test_pyproject_pep517.py b/tests/test_pyproject_pep517.py index 7977c918..046a676d 100644 --- a/tests/test_pyproject_pep517.py +++ b/tests/test_pyproject_pep517.py @@ -1,5 +1,6 @@ import gzip import hashlib +import inspect import shutil import sys import tarfile @@ -28,14 +29,6 @@ [gui_scripts] guithing = a.b:c """ -METADATA = """\ -Metadata-Version: 2.1 -Name: CMake.Example -Version: 0.0.1 -Requires-Python: >=3.7 
-Provides-Extra: test
-Requires-Dist: pytest>=6.0; extra == "test"
-"""
 
 mark_hashes_different = pytest.mark.xfail(
     sys.platform.startswith(("win", "cygwin")),
@@ -51,6 +44,19 @@ def compute_uncompressed_hash(inp: Path) -> str:
 
 @pytest.mark.usefixtures("package_simple_pyproject_ext")
 def test_pep517_sdist():
+    expected_metadata = (
+        inspect.cleandoc(
+            """
+            Metadata-Version: 2.1
+            Name: CMake.Example
+            Version: 0.0.1
+            Requires-Python: >=3.7
+            Provides-Extra: test
+            Requires-Dist: pytest>=6.0; extra == "test"
+            """
+        )
+        + "\n\n"
+    )
     dist = Path("dist")
     out = build_sdist("dist")
 
@@ -73,7 +79,7 @@ def test_pep517_sdist():
         pkg_info = f.extractfile("cmake_example-0.0.1/PKG-INFO")
         assert pkg_info
         pkg_info_contents = pkg_info.read().decode()
-        assert pkg_info_contents == METADATA
+        assert pkg_info_contents == expected_metadata
 
 
 @mark_hashes_different
@@ -359,3 +365,82 @@ def test_prepare_metdata_for_build_wheel_by_hand(tmp_path):
         assert metadata.get(k, None) == b
 
     assert len(metadata) == len(answer)
+
+
+@pytest.mark.usefixtures("package_pep639_pure")
+def test_pep639_license_files_metadata():
+    metadata = build.util.project_wheel_metadata(str(Path.cwd()), isolated=False)
+    answer = {
+        "Metadata-Version": ["2.4"],
+        "Name": ["pep639_pure"],
+        "Version": ["0.1.0"],
+        "License-Expression": ["MIT"],
+        "License-File": ["LICENSE1.txt", "nested/more/LICENSE2.txt"],
+    }
+
+    for k, b in answer.items():
+        assert metadata.get_all(k, None) == b
+
+    assert len(metadata) == sum(len(v) for v in answer.values())
+
+
+@pytest.mark.usefixtures("package_pep639_pure")
+def test_pep639_license_files_sdist():
+    expected_metadata = (
+        inspect.cleandoc(
+            """
+            Metadata-Version: 2.4
+            Name: pep639_pure
+            Version: 0.1.0
+            License-Expression: MIT
+            License-File: LICENSE1.txt
+            License-File: nested/more/LICENSE2.txt
+            """
+        )
+        + "\n\n"
+    )
+
+    dist = Path("dist")
+    out = build_sdist("dist")
+
+    (sdist,) = dist.iterdir()
+    assert sdist.name == "pep639_pure-0.1.0.tar.gz"
+    assert sdist == dist / out
+
+    with tarfile.open(sdist) as f:
+        file_names = set(f.getnames())
+        assert file_names == {
+            f"pep639_pure-0.1.0/{x}"
+            for x in (
+                "pyproject.toml",
+                "PKG-INFO",
+                "LICENSE1.txt",
+                "nested/more/LICENSE2.txt",
+            )
+        }
+        pkg_info = f.extractfile("pep639_pure-0.1.0/PKG-INFO")
+        assert pkg_info
+        pkg_info_contents = pkg_info.read().decode()
+        assert pkg_info_contents == expected_metadata
+
+
+@pytest.mark.usefixtures("package_pep639_pure")
+def test_pep639_license_files_wheel():
+    dist = Path("dist")
+    out = build_wheel("dist", {})
+    (wheel,) = dist.glob("pep639_pure-0.1.0-*.whl")
+    assert wheel == dist / out
+
+    with zipfile.ZipFile(wheel) as zf:
+        file_paths = {Path(p) for p in zf.namelist()}
+        with zf.open("pep639_pure-0.1.0.dist-info/METADATA") as f:
+            metadata = f.read().decode("utf-8")
+
+    assert Path("pep639_pure-0.1.0.dist-info/licenses/LICENSE1.txt") in file_paths
+    assert (
+        Path("pep639_pure-0.1.0.dist-info/licenses/nested/more/LICENSE2.txt")
+        in file_paths
+    )
+
+    assert "LICENSE1.txt" in metadata
+    assert "nested/more/LICENSE2.txt" in metadata
diff --git a/tests/test_pyproject_pep518.py b/tests/test_pyproject_pep518.py
index 70c6955a..49794b2b 100644
--- a/tests/test_pyproject_pep518.py
+++ b/tests/test_pyproject_pep518.py
@@ -34,6 +34,7 @@ def test_pep518_sdist(isolated, package_simple_pyproject_ext):
         Requires-Python: >=3.7
         Provides-Extra: test
         Requires-Dist: pytest>=6.0; extra == "test"
+
         """
     )
 
@@ -76,6 +77,7 @@ def test_pep518_sdist_with_cmake_config(isolated, cleanup_overwrite):
         Metadata-Version: 2.1
         Name: sdist_config
         Version: 0.1.0
+
         """
     )
 
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 9be88973..a8d3b543 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import sys
 from pathlib import Path
 from typing import Any
 
@@ -22,12 +23,15 @@ def test_compare_schemas():
     assert generate_skbuild_schema() == get_skbuild_schema()
 
 
+SCHEMAS = [
+    *DIR.parent.joinpath("docs/examples").glob("**/pyproject.toml"),
+    *DIR.joinpath("packages").glob("**/pyproject.toml"),
+]
+
+
 @pytest.mark.parametrize(
     "filepath",
-    [
-        *DIR.parent.joinpath("docs/examples").glob("**/pyproject.toml"),
-        *DIR.joinpath("packages").glob("**/pyproject.toml"),
-    ],
+    [s for s in SCHEMAS if sys.version_info >= (3, 8) or "pep639" not in str(s)],
 )
 def test_valid_schemas_files(filepath: Path) -> None:
     api = pytest.importorskip("validate_pyproject.api")
diff --git a/tests/test_skbuild_settings.py b/tests/test_skbuild_settings.py
index d7eb14d4..90d4e429 100644
--- a/tests/test_skbuild_settings.py
+++ b/tests/test_skbuild_settings.py
@@ -46,12 +46,7 @@ def test_skbuild_settings_default(tmp_path: Path):
     assert settings.wheel.packages is None
     assert settings.wheel.py_api == ""
    assert not settings.wheel.expand_macos_universal_tags
-    assert settings.wheel.license_files == [
-        "LICEN[CS]E*",
-        "COPYING*",
-        "NOTICE*",
-        "AUTHORS*",
-    ]
+    assert settings.wheel.license_files is None
     assert settings.wheel.exclude == []
     assert settings.wheel.build_tag == ""
     assert settings.backport.find_python == Version("3.26.1")
diff --git a/tests/test_wheelfile_utils.py b/tests/test_wheelfile_utils.py
index 9e92ac57..b38d7b0a 100644
--- a/tests/test_wheelfile_utils.py
+++ b/tests/test_wheelfile_utils.py
@@ -46,7 +46,7 @@ def test_wheel_writer_simple(tmp_path, monkeypatch):
     dist_info = wheel.dist_info_contents()
 
     assert dist_info == {
-        "METADATA": b"Metadata-Version: 2.1\nName: something\nVersion: 1.2.3\n",
+        "METADATA": b"Metadata-Version: 2.1\nName: something\nVersion: 1.2.3\n\n",
        "WHEEL": b"Wheel-Version: 1.0\nGenerator: scikit-build-core 1.2.3\nRoot-Is-Purelib: false\nTag: py3-none-any\n\n",
     }
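
The license validation added in src/scikit_build_core/build/metadata.py, and exercised by test_license_normalization above, relies on packaging.licenses.canonicalize_license_expression(), which is available in packaging 24.2 and later. A minimal standalone sketch of that behavior, assuming packaging>=24.2 is installed (illustrative only, not part of the patch):

# Illustrative sketch; assumes packaging>=24.2, which provides packaging.licenses.
from packaging.licenses import canonicalize_license_expression

# SPDX identifiers are case-normalized, mirroring test_license_normalization
# ("ApacHE-2.0" -> "Apache-2.0").
assert canonicalize_license_expression("ApacHE-2.0") == "Apache-2.0"

# Compound PEP 639 expressions are normalized term by term.
assert canonicalize_license_expression("mit AND apache-2.0") == "MIT AND Apache-2.0"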