From aaea47d24d11f2af806a68bc79f7b8988898eceb Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 8 Jul 2023 23:15:02 -0400 Subject: [PATCH 01/25] Fix all mypy issues --- .gitignore | 1 + mypy.ini | 5 -- pkg_resources/__init__.py | 17 ++-- pkg_resources/extern/__init__.py | 8 ++ pkg_resources/extern/packaging/__init__.pyi | 14 +++ pkg_resources/extern/packaging/_elffile.pyi | 1 + pkg_resources/extern/packaging/_manylinux.pyi | 1 + pkg_resources/extern/packaging/_musllinux.pyi | 1 + pkg_resources/extern/packaging/_parser.pyi | 1 + .../extern/packaging/_structures.pyi | 1 + pkg_resources/extern/packaging/_tokenizer.pyi | 1 + pkg_resources/extern/packaging/markers.pyi | 1 + pkg_resources/extern/packaging/metadata.pyi | 1 + .../extern/packaging/requirements.pyi | 1 + pkg_resources/extern/packaging/specifiers.pyi | 1 + pkg_resources/extern/packaging/tags.pyi | 1 + pkg_resources/extern/packaging/utils.pyi | 1 + pkg_resources/extern/packaging/version.pyi | 1 + pkg_resources/tests/test_pkg_resources.py | 3 +- pyproject.toml | 19 ++++ setup.cfg | 2 +- setup.py | 3 +- setuptools/__init__.py | 2 +- setuptools/command/build_ext.py | 36 ++++---- setuptools/command/build_py.py | 2 +- setuptools/command/easy_install.py | 17 +++- setuptools/command/rotate.py | 8 +- setuptools/command/upload_docs.py | 2 +- .../config/_validate_pyproject/__init__.py | 2 +- setuptools/config/expand.py | 2 +- setuptools/config/setupcfg.py | 2 +- setuptools/dist.py | 2 +- setuptools/extension.py | 2 +- setuptools/extern/__init__.py | 12 +++ setuptools/monkey.py | 7 +- setuptools/msvc.py | 3 +- setuptools/sandbox.py | 89 ++++++++++--------- .../tests/config/test_apply_pyprojecttoml.py | 3 +- .../integration/test_pip_install_sdist.py | 8 +- setuptools/tests/test_bdist_egg.py | 2 +- setuptools/tests/test_egg_info.py | 3 +- setuptools/tests/test_manifest.py | 3 +- setuptools/wheel.py | 2 +- tox.ini | 2 +- 44 files changed, 194 insertions(+), 102 deletions(-) delete mode 100644 mypy.ini create mode 100644 
pkg_resources/extern/packaging/__init__.pyi create mode 100644 pkg_resources/extern/packaging/_elffile.pyi create mode 100644 pkg_resources/extern/packaging/_manylinux.pyi create mode 100644 pkg_resources/extern/packaging/_musllinux.pyi create mode 100644 pkg_resources/extern/packaging/_parser.pyi create mode 100644 pkg_resources/extern/packaging/_structures.pyi create mode 100644 pkg_resources/extern/packaging/_tokenizer.pyi create mode 100644 pkg_resources/extern/packaging/markers.pyi create mode 100644 pkg_resources/extern/packaging/metadata.pyi create mode 100644 pkg_resources/extern/packaging/requirements.pyi create mode 100644 pkg_resources/extern/packaging/specifiers.pyi create mode 100644 pkg_resources/extern/packaging/tags.pyi create mode 100644 pkg_resources/extern/packaging/utils.pyi create mode 100644 pkg_resources/extern/packaging/version.pyi diff --git a/.gitignore b/.gitignore index 90ae80505e..f06484974d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ lib distribute.egg-info setuptools.egg-info .coverage +test_* .eggs .tox .venv diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index b6f972769e..0000000000 --- a/mypy.ini +++ /dev/null @@ -1,5 +0,0 @@ -[mypy] -ignore_missing_imports = True -# required to support namespace packages -# https://github.com/python/mypy/issues/14057 -explicit_package_bases = True diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3baa1f3c24..e14c2dae85 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -23,6 +23,7 @@ import time import re import types +from typing import TYPE_CHECKING, Optional import zipfile import zipimport import warnings @@ -43,11 +44,7 @@ import importlib from pkgutil import get_importer -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp +import _imp try: FileExistsError @@ -68,8 +65,10 @@ from os import open as os_open from os.path import isdir, split +importlib_machinery: Optional[types.ModuleType] 
try: - import importlib.machinery as importlib_machinery + import importlib.machinery + importlib_machinery = importlib.machinery # access attribute to force import under delayed import mechanisms. importlib_machinery.__name__ @@ -2231,7 +2230,8 @@ def resolve_egg_link(path): if hasattr(pkgutil, 'ImpImporter'): register_finder(pkgutil.ImpImporter, find_on_path) -register_finder(importlib_machinery.FileFinder, find_on_path) +if importlib_machinery: + register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2398,7 +2398,8 @@ def file_ns_handler(importer, path_item, packageName, module): register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +if importlib_machinery: + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index 948bcc6094..0b0eaa2927 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -1,5 +1,6 @@ import importlib.util import sys +from typing import TYPE_CHECKING class VendorImporter: @@ -78,3 +79,10 @@ def install(self): 'more_itertools', ) VendorImporter(__name__, names).install() + +if TYPE_CHECKING: + from . import packaging as packaging + import platformdirs as platformdirs + import jaraco as jaraco + import importlib_resources as importlib_resources + import more_itertools as more_itertools \ No newline at end of file diff --git a/pkg_resources/extern/packaging/__init__.pyi b/pkg_resources/extern/packaging/__init__.pyi new file mode 100644 index 0000000000..a6966e6f13 --- /dev/null +++ b/pkg_resources/extern/packaging/__init__.pyi @@ -0,0 +1,14 @@ +from packaging import * +from . 
import _elffile as _elffile +from . import _manylinux as _manylinux +from . import _musllinux as _musllinux +from . import _parser as _parser +from . import _structures as _structures +from . import _tokenizer as _tokenizer +from . import markers as markers +from . import metadata as metadata +from . import requirements as requirements +from . import specifiers as specifiers +from . import tags as tags +from . import utils as utils +from . import version as version diff --git a/pkg_resources/extern/packaging/_elffile.pyi b/pkg_resources/extern/packaging/_elffile.pyi new file mode 100644 index 0000000000..9abaf0ce9d --- /dev/null +++ b/pkg_resources/extern/packaging/_elffile.pyi @@ -0,0 +1 @@ +from packaging._elffile import * diff --git a/pkg_resources/extern/packaging/_manylinux.pyi b/pkg_resources/extern/packaging/_manylinux.pyi new file mode 100644 index 0000000000..8178e0cc65 --- /dev/null +++ b/pkg_resources/extern/packaging/_manylinux.pyi @@ -0,0 +1 @@ +from packaging._manylinux import * diff --git a/pkg_resources/extern/packaging/_musllinux.pyi b/pkg_resources/extern/packaging/_musllinux.pyi new file mode 100644 index 0000000000..f200d41384 --- /dev/null +++ b/pkg_resources/extern/packaging/_musllinux.pyi @@ -0,0 +1 @@ +from packaging._musllinux import * diff --git a/pkg_resources/extern/packaging/_parser.pyi b/pkg_resources/extern/packaging/_parser.pyi new file mode 100644 index 0000000000..2b70184caa --- /dev/null +++ b/pkg_resources/extern/packaging/_parser.pyi @@ -0,0 +1 @@ +from packaging._parser import * diff --git a/pkg_resources/extern/packaging/_structures.pyi b/pkg_resources/extern/packaging/_structures.pyi new file mode 100644 index 0000000000..7a15df2a05 --- /dev/null +++ b/pkg_resources/extern/packaging/_structures.pyi @@ -0,0 +1 @@ +from packaging._structures import * diff --git a/pkg_resources/extern/packaging/_tokenizer.pyi b/pkg_resources/extern/packaging/_tokenizer.pyi new file mode 100644 index 0000000000..c7ff4583c8 --- /dev/null +++ 
b/pkg_resources/extern/packaging/_tokenizer.pyi @@ -0,0 +1 @@ +from packaging._tokenizer import * diff --git a/pkg_resources/extern/packaging/markers.pyi b/pkg_resources/extern/packaging/markers.pyi new file mode 100644 index 0000000000..78d2fd90f8 --- /dev/null +++ b/pkg_resources/extern/packaging/markers.pyi @@ -0,0 +1 @@ +from packaging.markers import * diff --git a/pkg_resources/extern/packaging/metadata.pyi b/pkg_resources/extern/packaging/metadata.pyi new file mode 100644 index 0000000000..8c305b5830 --- /dev/null +++ b/pkg_resources/extern/packaging/metadata.pyi @@ -0,0 +1 @@ +from packaging.metadata import * diff --git a/pkg_resources/extern/packaging/requirements.pyi b/pkg_resources/extern/packaging/requirements.pyi new file mode 100644 index 0000000000..ba0d040352 --- /dev/null +++ b/pkg_resources/extern/packaging/requirements.pyi @@ -0,0 +1 @@ +from packaging.requirements import * diff --git a/pkg_resources/extern/packaging/specifiers.pyi b/pkg_resources/extern/packaging/specifiers.pyi new file mode 100644 index 0000000000..5e4e2fb7a5 --- /dev/null +++ b/pkg_resources/extern/packaging/specifiers.pyi @@ -0,0 +1 @@ +from packaging.specifiers import * diff --git a/pkg_resources/extern/packaging/tags.pyi b/pkg_resources/extern/packaging/tags.pyi new file mode 100644 index 0000000000..4a383c412c --- /dev/null +++ b/pkg_resources/extern/packaging/tags.pyi @@ -0,0 +1 @@ +from packaging.tags import * diff --git a/pkg_resources/extern/packaging/utils.pyi b/pkg_resources/extern/packaging/utils.pyi new file mode 100644 index 0000000000..87f9c84f83 --- /dev/null +++ b/pkg_resources/extern/packaging/utils.pyi @@ -0,0 +1 @@ +from packaging.utils import * diff --git a/pkg_resources/extern/packaging/version.pyi b/pkg_resources/extern/packaging/version.pyi new file mode 100644 index 0000000000..5f4f9dcdf7 --- /dev/null +++ b/pkg_resources/extern/packaging/version.pyi @@ -0,0 +1 @@ +from packaging.version import * diff --git a/pkg_resources/tests/test_pkg_resources.py 
b/pkg_resources/tests/test_pkg_resources.py index a05aeb2603..9f3ce4b4fe 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -8,6 +8,7 @@ import stat import distutils.dist import distutils.command.install_egg_info +from typing import List from unittest import mock @@ -42,7 +43,7 @@ def __call__(self): class TestZipProvider: - finalizers = [] + finalizers: List[EggRemover] = [] ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0) "A reference time for a file modification" diff --git a/pyproject.toml b/pyproject.toml index eae729c05a..25240c80e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,3 +11,22 @@ extend_exclude = "_vendor" [tool.pytest-enabler.mypy] # disabled + + +[tool.mypy] +# CI should test for all versions, local development gets hints for oldest supported +python_version = "3.8" +strict = false +# TODO: Not all dependencies are typed. setuptools itself should be typed too +# TODO: Test environment is not yet properly configured to install all imported packages +ignore_missing_imports = true +# required to support namespace packages: https://github.com/python/mypy/issues/14057 +explicit_package_bases = true +exclude = [ + "^build/", + # Duplicate module name + "^pkg_resources/tests/data/my-test-package-source/setup.py$", + # Vendored + "^.+?/_vendor", + "^setuptools/_distutils", +] diff --git a/setup.cfg b/setup.cfg index fc7c21f1a0..89c2f3d770 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,7 +49,7 @@ testing = pytest-cov; \ # coverage seems to make PyPy extremely slow python_implementation != "PyPy" - pytest-mypy >= 0.9.1; \ + pytest-mypy >= 0.10.3; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 2.2 diff --git a/setup.py b/setup.py index 075d7c405f..f05017e6f0 100755 --- a/setup.py +++ b/setup.py @@ -88,5 +88,6 @@ def _restore_install_lib(self): if __name__ == '__main__': # allow setup.py to run from another directory - here and os.chdir(here) + if 
here: + os.chdir(here) dist = setuptools.setup(**setup_params) diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 52d424bdf0..d81b75670a 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -113,7 +113,7 @@ def setup(**attrs): _Command = monkey.get_unpatched(distutils.core.Command) -class Command(_Command): +class Command(_Command): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 """ Setuptools internal actions are organized using a *command design pattern*. This means that each action (or group of closely related actions) executed during diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 574fa8e8ce..d62c398639 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -3,9 +3,8 @@ import itertools from importlib.machinery import EXTENSION_SUFFIXES from importlib.util import cache_from_source as _compiled_file_name -from typing import Dict, Iterator, List, Tuple +from typing import TYPE_CHECKING, Dict, Iterator, List, Tuple -from distutils.command.build_ext import build_ext as _du_build_ext from distutils.ccompiler import new_compiler from distutils.sysconfig import customize_compiler, get_config_var from distutils import log @@ -13,19 +12,22 @@ from setuptools.errors import BaseError from setuptools.extension import Extension, Library -try: - # Attempt to use Cython for building extensions, if available - from Cython.Distutils.build_ext import build_ext as _build_ext +if TYPE_CHECKING: + from distutils.command.build_ext import build_ext as _build_ext +else: + try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext - # Additionally, assert that the compiler module will load - # also. Ref #1229. - __import__('Cython.Compiler.Main') -except ImportError: - _build_ext = _du_build_ext + # Additionally, assert that the compiler module will load + # also. Ref #1229. 
+ __import__('Cython.Compiler.Main') + except ImportError: + from distutils.command.build_ext import build_ext as _build_ext # make sure _config_vars is initialized get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa +from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa # Not publicly exposed in distutils stubs def _customize_compiler_for_shlib(compiler): @@ -126,7 +128,7 @@ def _get_output_mapping(self) -> Iterator[Tuple[str, str]]: return build_py = self.get_finalized_command('build_py') - opt = self.get_finalized_command('install_lib').optimize or "" + opt = self.get_finalized_command('install_lib').optimize or "" # type: ignore[attr-defined] # TODO: Fix in distutils stubs for ext in self.extensions: inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext) @@ -296,7 +298,7 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): log.info("writing stub loader for %s to %s", ext._full_name, stub_file) if compile and os.path.exists(stub_file): raise BaseError(stub_file + " already exists! 
Please delete.") - if not self.dry_run: + if not self.dry_run: # type: ignore[attr-defined] # TODO: Fix in distutils stubs f = open(stub_file, 'w') f.write( '\n'.join( @@ -334,13 +336,13 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): def _compile_and_remove_stub(self, stub_file: str): from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize + byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) # type: ignore[attr-defined] # TODO: Fix in distutils stubs + optimize = self.get_finalized_command('install_lib').optimize # type: ignore[attr-defined] # TODO: Fix in distutils stubs if optimize > 0: byte_compile( - [stub_file], optimize=optimize, force=True, dry_run=self.dry_run + [stub_file], optimize=optimize, force=True, dry_run=self.dry_run # type: ignore[attr-defined] # TODO: Fix in distutils stubs ) - if os.path.exists(stub_file) and not self.dry_run: + if os.path.exists(stub_file) and not self.dry_run: # type: ignore[attr-defined] # TODO: Fix in distutils stubs os.unlink(stub_file) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 5709eb6d8c..198bee83df 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -215,7 +215,7 @@ def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[s This function should filter this case of invalid files out. 
""" build = self.get_finalized_command("build") - build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base) + build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base) # type: ignore[attr-defined] # TODO: Fix in distutils stubs norm_dirs = [os.path.normpath(p) for p in build_dirs if p] for file in files: diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 8ba4f094de..2e5d4aa590 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -25,6 +25,7 @@ from distutils.command import install import sys import os +from typing import TYPE_CHECKING, Dict, List, Optional, Union import zipimport import shutil import tempfile @@ -78,6 +79,8 @@ from .._path import ensure_directory from ..extern.jaraco.text import yield_lines +_FileDescriptorOrPath = Union[int, str, bytes, os.PathLike[str], os.PathLike[bytes]] + # Turn on PEP440Warnings warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) @@ -1774,7 +1777,7 @@ def _wrap_lines(cls, lines): if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': - PthDistributions = RewritePthDistributions + PthDistributions = RewritePthDistributions # type: ignore[misc] # Overwriting type def _first_line_re(): @@ -2024,7 +2027,13 @@ def is_python_script(script_text, filename): from os import chmod as _chmod except ImportError: # Jython compatibility - def _chmod(*args): + def _chmod( + path: _FileDescriptorOrPath, + mode: int, + *, + dir_fd: Optional[int] = None, + follow_symlinks: bool = True + ) -> None: pass @@ -2042,8 +2051,8 @@ class CommandSpec(list): those passed to Popen. 
""" - options = [] - split_args = dict() + options: List[str] = [] + split_args: Dict[str, bool] = dict() @classmethod def best(cls): diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py index cfb78ce52d..eb56109939 100644 --- a/setuptools/command/rotate.py +++ b/setuptools/command/rotate.py @@ -3,6 +3,7 @@ from distutils.errors import DistutilsOptionError import os import shutil +from typing import List from setuptools import Command @@ -17,7 +18,7 @@ class rotate(Command): ('keep=', 'k', "number of matching distributions to keep"), ] - boolean_options = [] + boolean_options: List[str] = [] def initialize_options(self): self.match = None @@ -37,7 +38,8 @@ def finalize_options(self): except ValueError as e: raise DistutilsOptionError("--keep must be an integer") from e if isinstance(self.match, str): - self.match = [convert_path(p.strip()) for p in self.match.split(',')] + self.match = [convert_path(p.strip()) + for p in self.match.split(',')] self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) def run(self): @@ -52,7 +54,7 @@ def run(self): files.reverse() log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep :] + files = files[self.keep:] for t, f in files: log.info("Deleting %s", f) if not self.dry_run: diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 27c98b7c30..c4ef83d381 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -51,7 +51,7 @@ def has_sphinx(self): and metadata.entry_points(group='distutils.commands', name='build_sphinx') ) - sub_commands = [('build_sphinx', has_sphinx)] + sub_commands = [('build_sphinx', has_sphinx)] # type: ignore[list-item] # TODO: Fix in distutils stubs def initialize_options(self): upload.initialize_options(self) diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py index dbe6cb4ca4..b605c9ef4a 100644 --- 
a/setuptools/config/_validate_pyproject/__init__.py +++ b/setuptools/config/_validate_pyproject/__init__.py @@ -5,7 +5,7 @@ from .error_reporting import detailed_errors, ValidationError from .extra_validations import EXTRA_VALIDATIONS from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException -from .fastjsonschema_validations import validate as _validate +from .fastjsonschema_validations import validate as _validate # type: ignore[attr-defined] # mypy false-positive. Pyright is fine here __all__ = [ "validate", diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 518f5ac260..ec07857fac 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -64,7 +64,7 @@ class StaticModule: """Proxy to a module object that avoids executing arbitrary code.""" def __init__(self, name: str, spec: ModuleSpec): - module = ast.parse(pathlib.Path(spec.origin).read_bytes()) + module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None vars(self).update(locals()) del self.self diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index bb35559069..ae1682dfa1 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -107,7 +107,7 @@ def _apply( filenames = [*other_files, filepath] try: - _Distribution.parse_config_files(dist, filenames=filenames) + _Distribution.parse_config_files(dist, filenames=filenames) # type: ignore[arg-type] # TODO: fix in disutils stubs handlers = parse_configuration( dist, dist.command_options, ignore_option_errors=ignore_option_errors ) diff --git a/setuptools/dist.py b/setuptools/dist.py index 429606f5d1..586b8c9d8a 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -393,7 +393,7 @@ def check_packages(dist, attr, value): _Distribution = get_unpatched(distutils.core.Distribution) -class Distribution(_Distribution): +class Distribution(_Distribution): # type: ignore[valid-type, misc] # 
https://github.com/python/mypy/issues/14458 """Distribution with support for tests and package data This is an enhanced version of 'distutils.dist.Distribution' that diff --git a/setuptools/extension.py b/setuptools/extension.py index 58c023f6b4..9eea0d6930 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -27,7 +27,7 @@ def _have_cython(): _Extension = get_unpatched(distutils.core.Extension) -class Extension(_Extension): +class Extension(_Extension): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 """ Describes a single extension module. diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index 67c4a4552f..0b29df4e36 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -1,5 +1,6 @@ import importlib.util import sys +from typing import TYPE_CHECKING class VendorImporter: @@ -82,3 +83,14 @@ def install(self): 'tomli', ) VendorImporter(__name__, names, 'setuptools._vendor').install() + +if TYPE_CHECKING: + import packaging as packaging + import ordered_set as ordered_set + import more_itertools as more_itertools + import importlib_metadata as importlib_metadata + import zipp as zipp + import importlib_resources as importlib_resources + import jaraco as jaraco + import typing_extensions as typing_extensions + import tomli as tomli diff --git a/setuptools/monkey.py b/setuptools/monkey.py index 901be43d48..d60d128eef 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -9,10 +9,13 @@ import functools from importlib import import_module import inspect +from typing import List, TypeVar import setuptools -__all__ = [] +_T = TypeVar("_T") + +__all__: List[str] = [] """ Everything is private. Contact the project team if you think you need this functionality. 
@@ -33,7 +36,7 @@ def _get_mro(cls): return inspect.getmro(cls) -def get_unpatched(item): +def get_unpatched(item: _T) -> _T: lookup = ( get_unpatched_class if isinstance(item, type) diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 5785c16945..10428b359c 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -21,9 +21,10 @@ import itertools import subprocess import distutils.errors +import sys from setuptools.extern.more_itertools import unique_everseen -if platform.system() == 'Windows': +if sys.platform == "win32": import winreg from os import environ else: diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 017c897b86..806f9de81e 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -9,6 +9,7 @@ import pickle import textwrap import builtins +from typing import Union, List import pkg_resources from distutils.errors import DistutilsError @@ -19,7 +20,7 @@ else: _os = sys.modules[os.name] try: - _file = file + _file = file # type: ignore[name-defined] # Check for global variable except NameError: _file = None _open = open @@ -263,6 +264,48 @@ def run_setup(setup_script, args): # Normal exit, just return +def _mk_dual_path_wrapper(name): + original = getattr(_os, name) + + def wrap(self: AbstractSandbox, src, dst, *args, **kw): + if self._active: + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + + return wrap + +def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os, name) + + def wrap(self: AbstractSandbox, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + + return wrap + +def _mk_single_with_return(name): + original = getattr(_os, name) + + def wrap(self: AbstractSandbox, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + + 
return wrap + +def _mk_query(name): + original = getattr(_os, name) + + def wrap(self: AbstractSandbox, *args, **kw): + retval = original(*args, **kw) + if self._active: + return self._remap_output(name, retval) + return retval + + return wrap + class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" @@ -298,30 +341,10 @@ def run(self, func): with self: return func() - def _mk_dual_path_wrapper(name): - original = getattr(_os, name) - - def wrap(self, src, dst, *args, **kw): - if self._active: - src, dst = self._remap_pair(name, src, dst, *args, **kw) - return original(src, dst, *args, **kw) - - return wrap - for name in ["rename", "link", "symlink"]: if hasattr(_os, name): locals()[name] = _mk_dual_path_wrapper(name) - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return original(path, *args, **kw) - - return wrap - if _file: _file = _mk_single_path_wrapper('file', _file) _open = _mk_single_path_wrapper('open', _open) @@ -349,32 +372,10 @@ def wrap(self, path, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_single_path_wrapper(name) - def _mk_single_with_return(name): - original = getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return self._remap_output(name, original(path, *args, **kw)) - return original(path, *args, **kw) - - return wrap - for name in ['readlink', 'tempnam']: if hasattr(_os, name): locals()[name] = _mk_single_with_return(name) - def _mk_query(name): - original = getattr(_os, name) - - def wrap(self, *args, **kw): - retval = original(*args, **kw) - if self._active: - return self._remap_output(name, retval) - return retval - - return wrap - for name in ['getcwd', 'tmpnam']: if hasattr(_os, name): locals()[name] = _mk_query(name) @@ -426,7 +427,7 @@ class 
DirectorySandbox(AbstractSandbox): ] ) - _exception_patterns = [] + _exception_patterns: List[Union[str, re.Pattern]] = [] "exempt writing to paths that match the pattern" def __init__(self, sandbox, exceptions=_EXCEPTIONS): diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index fd240f8674..358913506f 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -8,6 +8,7 @@ import tarfile from inspect import cleandoc from pathlib import Path +from typing import Tuple from unittest.mock import Mock from zipfile import ZipFile @@ -425,7 +426,7 @@ def core_metadata(dist) -> str: dist.metadata.write_pkg_file(buffer) pkg_file_txt = buffer.getvalue() - skip_prefixes = () + skip_prefixes: Tuple[str, ...] = () skip_lines = set() # ---- DIFF NORMALISATION ---- # PEP 621 is very particular about author/maintainer metadata conversion, so skip diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index 3671abbc4f..8c394ed28a 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -17,6 +17,7 @@ from enum import Enum from glob import glob from hashlib import md5 +from typing_extensions import reveal_type from urllib.request import urlopen import pytest @@ -27,7 +28,12 @@ pytestmark = pytest.mark.integration -(LATEST,) = Enum("v", "LATEST") + +class v(Enum): + LATEST = 1 + + +(LATEST,) = v """Default version to be checked""" # There are positive and negative aspects of checking the latest version of the # packages. 
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 45dd070967..918767e347 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE'), + os.environ.get('PYTHONDONTWRITEBYTECODE', False), reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index 614fca7a23..885689a808 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -83,7 +83,8 @@ def run(): @staticmethod def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]: version_str = pkg_info_lines[0].split(' ')[1] - return tuple(map(int, version_str.split('.')[:2])) + major, minor, *_ = map(int, version_str.split('.')) + return major, minor def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env): """ diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 33b85d0644..304f831263 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -10,6 +10,7 @@ import logging from distutils import log from distutils.errors import DistutilsTemplateError +from typing import List, Tuple from setuptools.command.egg_info import FileList, egg_info, translate_pattern from setuptools.dist import Distribution @@ -75,7 +76,7 @@ def touch(filename): ) -translate_specs = [ +translate_specs: List[Tuple[str, List[str], List[str]]] = [ ('foo', ['foo'], ['bar', 'foobar']), ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']), # Glob matching diff --git a/setuptools/wheel.py b/setuptools/wheel.py index c6eabddc1f..40e26ca706 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -14,7 +14,7 @@ import setuptools from 
setuptools.extern.packaging.version import Version as parse_version from setuptools.extern.packaging.tags import sys_tags -from setuptools.extern.packaging.utils import canonicalize_name +from packaging.utils import canonicalize_name from setuptools.command.egg_info import write_requirements, _egg_basename from setuptools.archive_util import _unpack_zipfile_obj diff --git a/tox.ini b/tox.ini index 0ecc4f4391..22cc86de6c 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ toxworkdir={env:TOX_WORK_DIR:.tox} deps = # Ideally all the dependencies should be set as "extras" # workaround for pytest-dev/execnet#195 - execnet @ git+https://github.com/jaraco/execnet@bugfix/195-encodingwarning + execnet>=2.0.0 # workaround for pypa/build#630 build[virtualenv] @ git+https://github.com/jaraco/build@bugfix/630-importlib-metadata setenv = From 11fd06ce36d6e21e12a84ed8ff3b6caab2c48b9d Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 8 Jul 2023 23:54:06 -0400 Subject: [PATCH 02/25] Ran black --- pkg_resources/__init__.py | 1 + pkg_resources/extern/__init__.py | 2 +- setuptools/command/easy_install.py | 2 +- setuptools/command/rotate.py | 5 ++--- .../config/_validate_pyproject/__init__.py | 2 +- setuptools/config/expand.py | 2 +- setuptools/dist.py | 2 +- setuptools/extern/__init__.py | 18 +++++++++--------- setuptools/sandbox.py | 4 ++++ setuptools/wheel.py | 2 +- 10 files changed, 22 insertions(+), 18 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index e14c2dae85..ad877f7f00 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -68,6 +68,7 @@ importlib_machinery: Optional[types.ModuleType] try: import importlib.machinery + importlib_machinery = importlib.machinery # access attribute to force import under delayed import mechanisms. 
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index 0b0eaa2927..c7bb6ccad5 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -85,4 +85,4 @@ def install(self): import platformdirs as platformdirs import jaraco as jaraco import importlib_resources as importlib_resources - import more_itertools as more_itertools \ No newline at end of file + import more_itertools as more_itertools diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 2e5d4aa590..1ba8432322 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -2032,7 +2032,7 @@ def _chmod( mode: int, *, dir_fd: Optional[int] = None, - follow_symlinks: bool = True + follow_symlinks: bool = True, ) -> None: pass diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py index eb56109939..6f73721c70 100644 --- a/setuptools/command/rotate.py +++ b/setuptools/command/rotate.py @@ -38,8 +38,7 @@ def finalize_options(self): except ValueError as e: raise DistutilsOptionError("--keep must be an integer") from e if isinstance(self.match, str): - self.match = [convert_path(p.strip()) - for p in self.match.split(',')] + self.match = [convert_path(p.strip()) for p in self.match.split(',')] self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) def run(self): @@ -54,7 +53,7 @@ def run(self): files.reverse() log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] + files = files[self.keep :] for t, f in files: log.info("Deleting %s", f) if not self.dry_run: diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py index b605c9ef4a..cddc1599ac 100644 --- a/setuptools/config/_validate_pyproject/__init__.py +++ b/setuptools/config/_validate_pyproject/__init__.py @@ -5,7 +5,7 @@ from .error_reporting import detailed_errors, ValidationError from .extra_validations import EXTRA_VALIDATIONS 
from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException -from .fastjsonschema_validations import validate as _validate # type: ignore[attr-defined] # mypy false-positive. Pyright is fine here +from .fastjsonschema_validations import validate as _validate # type: ignore[attr-defined] # mypy false-positive. Pyright is fine here __all__ = [ "validate", diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index ec07857fac..66268935f4 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -64,7 +64,7 @@ class StaticModule: """Proxy to a module object that avoids executing arbitrary code.""" def __init__(self, name: str, spec: ModuleSpec): - module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None + module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None vars(self).update(locals()) del self.self diff --git a/setuptools/dist.py b/setuptools/dist.py index 586b8c9d8a..f4e7b257b6 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -393,7 +393,7 @@ def check_packages(dist, attr, value): _Distribution = get_unpatched(distutils.core.Distribution) -class Distribution(_Distribution): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 +class Distribution(_Distribution): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 """Distribution with support for tests and package data This is an enhanced version of 'distutils.dist.Distribution' that diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index 0b29df4e36..1ba375d7d9 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -85,12 +85,12 @@ def install(self): VendorImporter(__name__, names, 'setuptools._vendor').install() if TYPE_CHECKING: - import packaging as packaging - import ordered_set as ordered_set - import more_itertools as 
more_itertools - import importlib_metadata as importlib_metadata - import zipp as zipp - import importlib_resources as importlib_resources - import jaraco as jaraco - import typing_extensions as typing_extensions - import tomli as tomli + import packaging as packaging + import ordered_set as ordered_set + import more_itertools as more_itertools + import importlib_metadata as importlib_metadata + import zipp as zipp + import importlib_resources as importlib_resources + import jaraco as jaraco + import typing_extensions as typing_extensions + import tomli as tomli diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 806f9de81e..abf4a383b0 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -274,6 +274,7 @@ def wrap(self: AbstractSandbox, src, dst, *args, **kw): return wrap + def _mk_single_path_wrapper(name, original=None): original = original or getattr(_os, name) @@ -284,6 +285,7 @@ def wrap(self: AbstractSandbox, path, *args, **kw): return wrap + def _mk_single_with_return(name): original = getattr(_os, name) @@ -295,6 +297,7 @@ def wrap(self: AbstractSandbox, path, *args, **kw): return wrap + def _mk_query(name): original = getattr(_os, name) @@ -306,6 +309,7 @@ def wrap(self: AbstractSandbox, *args, **kw): return wrap + class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" diff --git a/setuptools/wheel.py b/setuptools/wheel.py index 40e26ca706..c6eabddc1f 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -14,7 +14,7 @@ import setuptools from setuptools.extern.packaging.version import Version as parse_version from setuptools.extern.packaging.tags import sys_tags -from packaging.utils import canonicalize_name +from setuptools.extern.packaging.utils import canonicalize_name from setuptools.command.egg_info import write_requirements, _egg_basename from setuptools.archive_util import _unpack_zipfile_obj From 80505607ac73233cfb29e6c54aeb2fc19b967e1b Mon Sep 17 00:00:00 2001 From: Avasam 
Date: Sun, 1 Oct 2023 13:52:23 -0400 Subject: [PATCH 03/25] Exclude tox from mypy check --- pyproject.toml | 1 + tox.ini | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 25240c80e7..9fe4bac237 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ ignore_missing_imports = true explicit_package_bases = true exclude = [ "^build/", + "^.tox/", # Duplicate module name "^pkg_resources/tests/data/my-test-package-source/setup.py$", # Vendored diff --git a/tox.ini b/tox.ini index 22cc86de6c..23410538a5 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,6 @@ toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = # Ideally all the dependencies should be set as "extras" - # workaround for pytest-dev/execnet#195 execnet>=2.0.0 # workaround for pypa/build#630 build[virtualenv] @ git+https://github.com/jaraco/build@bugfix/630-importlib-metadata From ddbf028c517c138ebcfd50b6f4624546258a40d1 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 1 Oct 2023 15:06:02 -0400 Subject: [PATCH 04/25] Fix all mypy issues again --- setuptools/command/_requirestxt.py | 2 +- setuptools/command/build_ext.py | 2 ++ setuptools/command/editable_wheel.py | 2 +- setuptools/config/pyprojecttoml.py | 3 ++- setuptools/config/setupcfg.py | 2 +- setuptools/dist.py | 14 +++++++------- setuptools/extern/__init__.py | 1 + setuptools/extern/packaging/__init__.pyi | 1 + setuptools/tests/_packaging_compat.py | 2 +- 9 files changed, 17 insertions(+), 12 deletions(-) create mode 100644 setuptools/extern/packaging/__init__.pyi diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py index 32bae2c4b4..827ba87356 100644 --- a/setuptools/command/_requirestxt.py +++ b/setuptools/command/_requirestxt.py @@ -34,7 +34,7 @@ def _prepare( def _convert_extras_requirements( - extras_require: _StrOrIter, + extras_require: Mapping[str, _StrOrIter], ) -> Mapping[str, _Ordered[Requirement]]: """ Convert requirements in `extras_require` of the 
form diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 8b84d44fd7..f441995909 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -14,6 +14,7 @@ from setuptools.extension import Extension, Library if TYPE_CHECKING: + from setuptools.dist import Distribution from distutils.command.build_ext import build_ext as _build_ext else: try: @@ -83,6 +84,7 @@ def get_abi3_suffix(): class build_ext(_build_ext): editable_mode: bool = False inplace: bool = False + distribution: "Distribution" def run(self): """Build extensions in build directory, then copy if --inplace""" diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 79c839f8f0..46962acdbf 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -272,7 +272,7 @@ def _run_build_commands( self._run_install("data") return files, mapping - def _run_build_subcommands(self): + def _run_build_subcommands(self) -> None: """ Issue #3501 indicates that some plugins/customizations might rely on: diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 93dbd9f559..78e975a53c 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -22,6 +22,7 @@ if TYPE_CHECKING: from setuptools.dist import Distribution # noqa + from typing_extensions import Self _Path = Union[str, os.PathLike] _logger = logging.getLogger(__name__) @@ -402,7 +403,7 @@ def __init__( self._project_cfg = project_cfg self._setuptools_cfg = setuptools_cfg - def __enter__(self): + def __enter__(self) -> "Self": """When entering the context, the values of ``packages``, ``py_modules`` and ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``. 
""" diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index ae1682dfa1..0153058c29 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -474,7 +474,7 @@ def parse_section(self, section_options): # Keep silent for a new option may appear anytime. self[name] = value - def parse(self): + def parse(self) -> None: """Parses configuration file items from one or more related sections. diff --git a/setuptools/dist.py b/setuptools/dist.py index 52f9ff6534..e097e85ad6 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -10,7 +10,7 @@ from contextlib import suppress from glob import iglob from pathlib import Path -from typing import List, Optional, Set +from typing import Dict, List, MutableMapping, Optional, Sequence, Set, Tuple import distutils.cmd import distutils.command @@ -283,19 +283,19 @@ def patch_missing_pkg_info(self, attrs): dist._version = _normalization.safe_version(str(attrs['version'])) self._patched_dist = dist - def __init__(self, attrs=None): + def __init__(self, attrs: Optional[MutableMapping] = None) -> None: have_package_data = hasattr(self, "package_data") if not have_package_data: - self.package_data = {} + self.package_data: Dict[str, List[str]] = {} attrs = attrs or {} - self.dist_files = [] + self.dist_files: List[Tuple[str, str, str]] = [] # Filter-out setuptools' specific options. 
self.src_root = attrs.pop("src_root", None) self.patch_missing_pkg_info(attrs) self.dependency_links = attrs.pop('dependency_links', []) self.setup_requires = attrs.pop('setup_requires', []) for ep in metadata.entry_points(group='distutils.setup_keywords'): - vars(self).setdefault(ep.name, None) + vars(self).setdefault(ep.name, None) # type: ignore[attr-defined] # https://github.com/python/mypy/issues/14458 metadata_only = set(self._DISTUTILS_UNSUPPORTED_METADATA) metadata_only -= {"install_requires", "extras_require"} @@ -407,7 +407,7 @@ def _normalize_requires(self): k: list(map(str, _reqs.parse(v or []))) for k, v in extras_require.items() } - def _finalize_license_files(self): + def _finalize_license_files(self) -> None: """Compute names of all license files which should be included.""" license_files: Optional[List[str]] = self.metadata.license_files patterns: List[str] = license_files if license_files else [] @@ -420,7 +420,7 @@ def _finalize_license_files(self): # Default patterns match the ones wheel uses # See https://wheel.readthedocs.io/en/stable/user_guide.html # -> 'Including license files in the generated wheel file' - patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') + patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*'] self.metadata.license_files = list( unique_everseen(self._expand_patterns(patterns)) diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index 1ba375d7d9..8ccf73be98 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -86,6 +86,7 @@ def install(self): if TYPE_CHECKING: import packaging as packaging + import packaging.version import ordered_set as ordered_set import more_itertools as more_itertools import importlib_metadata as importlib_metadata diff --git a/setuptools/extern/packaging/__init__.pyi b/setuptools/extern/packaging/__init__.pyi new file mode 100644 index 0000000000..77224d3269 --- /dev/null +++ b/setuptools/extern/packaging/__init__.pyi @@ -0,0 +1 @@ 
+from packaging import * diff --git a/setuptools/tests/_packaging_compat.py b/setuptools/tests/_packaging_compat.py index 5bdcc554d5..7538ba5e07 100644 --- a/setuptools/tests/_packaging_compat.py +++ b/setuptools/tests/_packaging_compat.py @@ -1,7 +1,7 @@ from packaging import __version__ as packaging_version if tuple(packaging_version.split(".")) >= ("23", "2"): - from packaging.metadata import Metadata + from packaging.metadata import Metadata # type: ignore[attr-defined] else: # Just pretend it exists while waiting for release... from unittest.mock import MagicMock From 37a2bb111f0a34177fc83c097d32d4fb47dbbb1d Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 22 Oct 2023 18:42:57 -0400 Subject: [PATCH 05/25] Address PR comments --- mypy.ini | 22 + pkg_resources/__init__.py | 16 +- pkg_resources/extern/__init__.py | 8 - pkg_resources/extern/packaging/__init__.pyi | 14 - pkg_resources/extern/packaging/_elffile.pyi | 1 - pkg_resources/extern/packaging/_manylinux.pyi | 1 - pkg_resources/extern/packaging/_musllinux.pyi | 1 - pkg_resources/extern/packaging/_parser.pyi | 1 - .../extern/packaging/_structures.pyi | 1 - pkg_resources/extern/packaging/_tokenizer.pyi | 1 - pkg_resources/extern/packaging/markers.pyi | 1 - pkg_resources/extern/packaging/metadata.pyi | 1 - .../extern/packaging/requirements.pyi | 1 - pkg_resources/extern/packaging/specifiers.pyi | 1 - pkg_resources/extern/packaging/tags.pyi | 1 - pkg_resources/extern/packaging/utils.pyi | 1 - pkg_resources/extern/packaging/version.pyi | 1 - pkg_resources/tests/test_resources.py | 1762 ++++++++--------- pyproject.toml | 20 - setup.cfg | 2 +- setup.py | 4 +- setuptools/_importlib.py | 102 +- setuptools/_normalization.py | 250 +-- setuptools/command/egg_info.py | 1470 +++++++------- setuptools/extern/__init__.py | 13 - setuptools/extern/packaging/__init__.pyi | 1 - setuptools/msvc.py | 15 +- setuptools/sandbox.py | 88 +- setuptools/tests/_packaging_compat.py | 4 +- .../integration/test_pip_install_sdist.py | 10 +- 
setuptools/tests/test_bdist_egg.py | 2 +- 31 files changed, 1880 insertions(+), 1936 deletions(-) create mode 100644 mypy.ini delete mode 100644 pkg_resources/extern/packaging/__init__.pyi delete mode 100644 pkg_resources/extern/packaging/_elffile.pyi delete mode 100644 pkg_resources/extern/packaging/_manylinux.pyi delete mode 100644 pkg_resources/extern/packaging/_musllinux.pyi delete mode 100644 pkg_resources/extern/packaging/_parser.pyi delete mode 100644 pkg_resources/extern/packaging/_structures.pyi delete mode 100644 pkg_resources/extern/packaging/_tokenizer.pyi delete mode 100644 pkg_resources/extern/packaging/markers.pyi delete mode 100644 pkg_resources/extern/packaging/metadata.pyi delete mode 100644 pkg_resources/extern/packaging/requirements.pyi delete mode 100644 pkg_resources/extern/packaging/specifiers.pyi delete mode 100644 pkg_resources/extern/packaging/tags.pyi delete mode 100644 pkg_resources/extern/packaging/utils.pyi delete mode 100644 pkg_resources/extern/packaging/version.pyi delete mode 100644 setuptools/extern/packaging/__init__.pyi diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000000..c01d06a310 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,22 @@ +[mypy] +# CI should test for all versions, local development gets hints for oldest supported +python_version = 3.8 +strict = False +# TODO: Not all dependencies are typed. 
setuptools itself should be typed too +# TODO: Test environment is not yet properly configured to install all imported packages +ignore_missing_imports = True +# required to support namespace packages: https://github.com/python/mypy/issues/14057 +explicit_package_bases = True +exclude = (?x)( + ^build/ + | ^.tox/ + | ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name + | ^.+?/(_vendor|extern)/ # Vendored + | ^setuptools/_distutils/ # Vendored + ) + +# https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 +[mypy-pkg_resources.extern.*] +ignore_missing_imports = True +[mypy-setuptools.extern.*] +ignore_missing_imports = True diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index ad877f7f00..03bfad16f7 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -65,16 +65,13 @@ from os import open as os_open from os.path import isdir, split -importlib_machinery: Optional[types.ModuleType] try: - import importlib.machinery - - importlib_machinery = importlib.machinery + import importlib.machinery as importlib_machinery # access attribute to force import under delayed import mechanisms. 
importlib_machinery.__name__ except ImportError: - importlib_machinery = None + importlib_machinery: Optional[types.ModuleType] = None # type: ignore[no-redef] # https://github.com/python/mypy/issues/1393 from pkg_resources.extern.jaraco.text import ( yield_lines, @@ -82,8 +79,8 @@ join_continuation, ) -from pkg_resources.extern import platformdirs -from pkg_resources.extern import packaging +from pkg_resources.extern import platformdirs # type: ignore[attr-defined] +from pkg_resources.extern import packaging # type: ignore[attr-defined] __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') @@ -2231,8 +2228,9 @@ def resolve_egg_link(path): if hasattr(pkgutil, 'ImpImporter'): register_finder(pkgutil.ImpImporter, find_on_path) -if importlib_machinery: - register_finder(importlib_machinery.FileFinder, find_on_path) +# TODO: If importlib_machinery import fails, this will also fail. This should be fixed. +# https://github.com/pypa/setuptools/pull/3979/files#r1367959803 +register_finder(importlib_machinery.FileFinder, find_on_path) # type: ignore[no-untyped-call] _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index c7bb6ccad5..948bcc6094 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -1,6 +1,5 @@ import importlib.util import sys -from typing import TYPE_CHECKING class VendorImporter: @@ -79,10 +78,3 @@ def install(self): 'more_itertools', ) VendorImporter(__name__, names).install() - -if TYPE_CHECKING: - from . 
import packaging as packaging - import platformdirs as platformdirs - import jaraco as jaraco - import importlib_resources as importlib_resources - import more_itertools as more_itertools diff --git a/pkg_resources/extern/packaging/__init__.pyi b/pkg_resources/extern/packaging/__init__.pyi deleted file mode 100644 index a6966e6f13..0000000000 --- a/pkg_resources/extern/packaging/__init__.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from packaging import * -from . import _elffile as _elffile -from . import _manylinux as _manylinux -from . import _musllinux as _musllinux -from . import _parser as _parser -from . import _structures as _structures -from . import _tokenizer as _tokenizer -from . import markers as markers -from . import metadata as metadata -from . import requirements as requirements -from . import specifiers as specifiers -from . import tags as tags -from . import utils as utils -from . import version as version diff --git a/pkg_resources/extern/packaging/_elffile.pyi b/pkg_resources/extern/packaging/_elffile.pyi deleted file mode 100644 index 9abaf0ce9d..0000000000 --- a/pkg_resources/extern/packaging/_elffile.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging._elffile import * diff --git a/pkg_resources/extern/packaging/_manylinux.pyi b/pkg_resources/extern/packaging/_manylinux.pyi deleted file mode 100644 index 8178e0cc65..0000000000 --- a/pkg_resources/extern/packaging/_manylinux.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging._manylinux import * diff --git a/pkg_resources/extern/packaging/_musllinux.pyi b/pkg_resources/extern/packaging/_musllinux.pyi deleted file mode 100644 index f200d41384..0000000000 --- a/pkg_resources/extern/packaging/_musllinux.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging._musllinux import * diff --git a/pkg_resources/extern/packaging/_parser.pyi b/pkg_resources/extern/packaging/_parser.pyi deleted file mode 100644 index 2b70184caa..0000000000 --- a/pkg_resources/extern/packaging/_parser.pyi +++ /dev/null @@ -1 +0,0 @@ -from 
packaging._parser import * diff --git a/pkg_resources/extern/packaging/_structures.pyi b/pkg_resources/extern/packaging/_structures.pyi deleted file mode 100644 index 7a15df2a05..0000000000 --- a/pkg_resources/extern/packaging/_structures.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging._structures import * diff --git a/pkg_resources/extern/packaging/_tokenizer.pyi b/pkg_resources/extern/packaging/_tokenizer.pyi deleted file mode 100644 index c7ff4583c8..0000000000 --- a/pkg_resources/extern/packaging/_tokenizer.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging._tokenizer import * diff --git a/pkg_resources/extern/packaging/markers.pyi b/pkg_resources/extern/packaging/markers.pyi deleted file mode 100644 index 78d2fd90f8..0000000000 --- a/pkg_resources/extern/packaging/markers.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.markers import * diff --git a/pkg_resources/extern/packaging/metadata.pyi b/pkg_resources/extern/packaging/metadata.pyi deleted file mode 100644 index 8c305b5830..0000000000 --- a/pkg_resources/extern/packaging/metadata.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.metadata import * diff --git a/pkg_resources/extern/packaging/requirements.pyi b/pkg_resources/extern/packaging/requirements.pyi deleted file mode 100644 index ba0d040352..0000000000 --- a/pkg_resources/extern/packaging/requirements.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.requirements import * diff --git a/pkg_resources/extern/packaging/specifiers.pyi b/pkg_resources/extern/packaging/specifiers.pyi deleted file mode 100644 index 5e4e2fb7a5..0000000000 --- a/pkg_resources/extern/packaging/specifiers.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.specifiers import * diff --git a/pkg_resources/extern/packaging/tags.pyi b/pkg_resources/extern/packaging/tags.pyi deleted file mode 100644 index 4a383c412c..0000000000 --- a/pkg_resources/extern/packaging/tags.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.tags import * diff --git a/pkg_resources/extern/packaging/utils.pyi 
b/pkg_resources/extern/packaging/utils.pyi deleted file mode 100644 index 87f9c84f83..0000000000 --- a/pkg_resources/extern/packaging/utils.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.utils import * diff --git a/pkg_resources/extern/packaging/version.pyi b/pkg_resources/extern/packaging/version.pyi deleted file mode 100644 index 5f4f9dcdf7..0000000000 --- a/pkg_resources/extern/packaging/version.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging.version import * diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index 608c67aeeb..c42ac1231c 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -1,881 +1,881 @@ -import os -import sys -import string -import platform -import itertools - -import pytest -from pkg_resources.extern import packaging - -import pkg_resources -from pkg_resources import ( - parse_requirements, - VersionConflict, - parse_version, - Distribution, - EntryPoint, - Requirement, - safe_version, - safe_name, - WorkingSet, -) - - -# from Python 3.6 docs. -def pairwise(iterable): - "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
- a, b = itertools.tee(iterable) - next(b, None) - return zip(a, b) - - -class Metadata(pkg_resources.EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self, *pairs): - self.metadata = dict(pairs) - - def has_metadata(self, name): - return name in self.metadata - - def get_metadata(self, name): - return self.metadata[name] - - def get_metadata_lines(self, name): - return pkg_resources.yield_lines(self.get_metadata(name)) - - -dist_from_fn = pkg_resources.Distribution.from_filename - - -class TestDistro: - def testCollection(self): - # empty path should produce no distributions - ad = pkg_resources.Environment([], platform=None, python=None) - assert list(ad) == [] - assert ad['FooPkg'] == [] - ad.add(dist_from_fn("FooPkg-1.3_1.egg")) - ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) - ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - assert ad['FooPkg'] - # But only 1 package - assert list(ad) == ['foopkg'] - - # Distributions sort by version - expected = ['1.4', '1.3-1', '1.2'] - assert [dist.version for dist in ad['FooPkg']] == expected - - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2'] - - # And inserting adds them in order - ad.add(dist_from_fn("FooPkg-1.9.egg")) - assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2'] - - ws = WorkingSet([]) - foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") - foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") - (req,) = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - assert ad.best_match(req, ws).version == '1.9' - # If a matching distro is already installed, should return only that - ws.add(foo14) - assert ad.best_match(req, ws).version == '1.4' - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]) - ws.add(foo12) - ws.add(foo14) - with 
pytest.raises(VersionConflict): - ad.best_match(req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]) - ws.add(foo14) - ws.add(foo12) - ws.add(foo14) - assert ad.best_match(req, ws).version == '1.4' - - def checkFooPkg(self, d): - assert d.project_name == "FooPkg" - assert d.key == "foopkg" - assert d.version == "1.3.post1" - assert d.py_version == "2.4" - assert d.platform == "win32" - assert d.parsed_version == parse_version("1.3-1") - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg", - version="1.3-1", - py_version="2.4", - platform="win32", - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - assert d.py_version == '{}.{}'.format(*sys.version_info) - assert d.platform is None - - def testDistroParse(self): - d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg") - self.checkFooPkg(d) - d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", - project_name="FooPkg", - py_version="2.4", - platform="win32", - metadata=Metadata(('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")), - ) - self.checkFooPkg(d) - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - assert list(dist.requires(extras)) == list(parse_requirements(txt)) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - needs_object_dir = pytest.mark.skipif( - not hasattr(object, '__dir__'), - reason='object.__dir__ necessary for self.__dir__ implementation', - ) - - def test_distribution_dir(self): - d = pkg_resources.Distribution() - dir(d) - - @needs_object_dir - def test_distribution_dir_includes_provider_dir(self): - d = pkg_resources.Distribution() - before = d.__dir__() - assert 'test_attr' not in before - 
d._provider.test_attr = None - after = d.__dir__() - assert len(after) == len(before) + 1 - assert 'test_attr' in after - - @needs_object_dir - def test_distribution_dir_ignores_provider_dir_leading_underscore(self): - d = pkg_resources.Distribution() - before = d.__dir__() - assert '_test_attr' not in before - d._provider._test_attr = None - after = d.__dir__() - assert len(after) == len(before) - assert '_test_attr' not in after - - def testResolve(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - assert list(ws.resolve([], ad)) == [] - # Request something not in the collection -> DistributionNotFound - with pytest.raises(pkg_resources.DistributionNotFound): - ws.resolve(parse_requirements("Foo"), ad) - - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")), - ) - ad.add(Foo) - ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - assert targets == [Foo] - list(map(ws.add, targets)) - with pytest.raises(VersionConflict): - ws.resolve(parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - with pytest.raises(pkg_resources.DistributionNotFound): - ws.resolve(parse_requirements("Foo[bar]"), ad) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz] - # Requests for conflicting versions produce VersionConflict - with pytest.raises(VersionConflict) as vc: - ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad) - - msg = 'Foo 0.9 is installed but Foo==1.2 is required' - assert vc.value.report() == msg - - def 
test_environment_marker_evaluation_negative(self): - """Environment markers are evaluated at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad) - assert list(res) == [] - - def test_environment_marker_evaluation_positive(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info") - ad.add(Foo) - res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad) - assert list(res) == [Foo] - - def test_environment_marker_evaluation_called(self): - """ - If one package foo requires bar without any extras, - markers should pass for bar without extras. - """ - (parent_req,) = parse_requirements("foo") - (req,) = parse_requirements("bar;python_version>='2'") - req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) - assert req_extras.markers_pass(req) - - (parent_req,) = parse_requirements("foo[]") - (req,) = parse_requirements("bar;python_version>='2'") - req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) - assert req_extras.markers_pass(req) - - def test_marker_evaluation_with_extras(self): - """Extras are also evaluated as markers at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", - ) - ), - ) - ad.add(Foo) - assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - res = list(ws.resolve(parse_requirements("Foo[baz]"), ad)) - assert res == [Foo, quux] - - def test_marker_evaluation_with_extras_normlized(self): - """Extras are also evaluated as markers at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - 
"/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz-lightyear\n" - "Requires-Dist: quux; extra=='baz-lightyear'", - ) - ), - ) - ad.add(Foo) - assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad)) - assert res == [Foo, quux] - - def test_marker_evaluation_with_multiple_extras(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" - "Requires-Dist: quux; extra=='baz'\n" - "Provides-Extra: bar\n" - "Requires-Dist: fred; extra=='bar'\n", - ) - ), - ) - ad.add(Foo) - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info") - ad.add(fred) - res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad)) - assert sorted(res) == [fred, quux, Foo] - - def test_marker_evaluation_with_extras_loop(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - a = Distribution.from_filename( - "/foo_dir/a-0.2.dist-info", - metadata=Metadata(("METADATA", "Requires-Dist: c[a]")), - ) - b = Distribution.from_filename( - "/foo_dir/b-0.3.dist-info", - metadata=Metadata(("METADATA", "Requires-Dist: c[b]")), - ) - c = Distribution.from_filename( - "/foo_dir/c-1.0.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: a\n" - "Requires-Dist: b;extra=='a'\n" - "Provides-Extra: b\n" - "Requires-Dist: foo;extra=='b'", - ) - ), - ) - foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info") - for dist in (a, b, c, foo): - ad.add(dist) - res = list(ws.resolve(parse_requirements("a"), ad)) - assert res == [a, c, b, foo] - - @pytest.mark.xfail( - sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', - 
reason="https://github.com/python/cpython/issues/103632", - ) - def testDistroDependsOptions(self): - d = self.distRequires( - """ - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""" - ) - self.checkRequires(d, "Twisted>=1.5") - self.checkRequires( - d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires(d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]) - self.checkRequires( - d, - "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen", "fastcgi"], - ) - self.checkRequires( - d, - "Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"], - ) - with pytest.raises(pkg_resources.UnknownExtra): - d.requires(["foo"]) - - -class TestWorkingSet: - def test_find_conflicting(self): - ws = WorkingSet([]) - Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg") - ws.add(Foo) - - # create a requirement that conflicts with Foo 1.2 - req = next(parse_requirements("Foo<1.2")) - - with pytest.raises(VersionConflict) as vc: - ws.find(req) - - msg = 'Foo 1.2 is installed but Foo<1.2 is required' - assert vc.value.report() == msg - - def test_resolve_conflicts_with_prior(self): - """ - A ContextualVersionConflict should be raised when a requirement - conflicts with a prior requirement for a different package. - """ - # Create installation where Foo depends on Baz 1.0 and Bar depends on - # Baz 2.0. 
- ws = WorkingSet([]) - md = Metadata(('depends.txt', "Baz==1.0")) - Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md) - ws.add(Foo) - md = Metadata(('depends.txt', "Baz==2.0")) - Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md) - ws.add(Bar) - Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg") - ws.add(Baz) - Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg") - ws.add(Baz) - - with pytest.raises(VersionConflict) as vc: - ws.resolve(parse_requirements("Foo\nBar\n")) - - msg = "Baz 1.0 is installed but Baz==2.0 is required by " - msg += repr(set(['Bar'])) - assert vc.value.report() == msg - - -class TestEntryPoints: - def assertfields(self, ep): - assert ep.name == "foo" - assert ep.module_name == "pkg_resources.tests.test_resources" - assert ep.attrs == ("TestEntryPoints",) - assert ep.extras == ("x",) - assert ep.load() is TestEntryPoints - expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" - assert str(ep) == expect - - def setup_method(self, method): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]')) - ) - - def testBasics(self): - ep = EntryPoint( - "foo", - "pkg_resources.tests.test_resources", - ["TestEntryPoints"], - ["x"], - self.dist, - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - assert ep.name == "bar baz" - assert ep.module_name == "spammity" - assert ep.attrs == () - assert ep.extras == ("ping",) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - assert ep.name == "fizzly" - assert ep.module_name == "wocka" - assert ep.attrs == ("foo",) - assert ep.extras == () - - # plus in the name - spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer" - ep = EntryPoint.parse(spec) - assert ep.name == 'html+mako' - - reject_specs = "foo", 
"x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2" - - @pytest.mark.parametrize("reject_spec", reject_specs) - def test_reject_spec(self, reject_spec): - with pytest.raises(ValueError): - EntryPoint.parse(reject_spec) - - def test_printable_name(self): - """ - Allow any printable character in the name. - """ - # Create a name with all printable characters; strip the whitespace. - name = string.printable.strip() - spec = "{name} = module:attr".format(**locals()) - ep = EntryPoint.parse(spec) - assert ep.name == name - - def checkSubMap(self, m): - assert len(m) == len(self.submap_expect) - for key, ep in self.submap_expect.items(): - assert m.get(key).name == ep.name - assert m.get(key).module_name == ep.module_name - assert sorted(m.get(key).attrs) == sorted(ep.attrs) - assert sorted(m.get(key).extras) == sorted(ep.extras) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint( - 'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2'] - ), - feature3=EntryPoint('feature3', 'this.module', extras=['something']), - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - with pytest.raises(ValueError): - EntryPoint.parse_group("x a", "foo=bar") - with pytest.raises(ValueError): - EntryPoint.parse_group("x", ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz': self.submap_str}) - self.checkSubMap(m['xyz']) - assert list(m.keys()) == ['xyz'] - m = EntryPoint.parse_map("[xyz]\n" + self.submap_str) - self.checkSubMap(m['xyz']) - assert list(m.keys()) == ['xyz'] - with pytest.raises(ValueError): - EntryPoint.parse_map(["[xyz]", "[xyz]"]) - with pytest.raises(ValueError): - EntryPoint.parse_map(self.submap_str) - - def 
testDeprecationWarnings(self): - ep = EntryPoint( - "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], ["x"] - ) - with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning): - ep.load(require=False) - - -class TestRequirements: - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - assert str(r) == "Twisted>=1.2" - assert repr(r) == "Requirement.parse('Twisted>=1.2')" - assert r == Requirement("Twisted>=1.2") - assert r == Requirement("twisTed>=1.2") - assert r != Requirement("Twisted>=2.0") - assert r != Requirement("Zope>=1.2") - assert r != Requirement("Zope>=3.0") - assert r != Requirement("Twisted[extras]>=1.2") - - def testOrdering(self): - r1 = Requirement("Twisted==1.2c1,>=1.2") - r2 = Requirement("Twisted>=1.2,==1.2c1") - assert r1 == r2 - assert str(r1) == str(r2) - assert str(r2) == "Twisted==1.2c1,>=1.2" - assert Requirement("Twisted") != Requirement( - "Twisted @ https://localhost/twisted.zip" - ) - - def testBasicContains(self): - r = Requirement("Twisted>=1.2") - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - assert parse_version('1.2') in r - assert parse_version('1.1') not in r - assert '1.2' in r - assert '1.1' not in r - assert foo_dist not in r - assert twist11 not in r - assert twist12 in r - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - assert r1 == r2 - assert set(r1.extras) == set(("foo", "bar")) - assert set(r2.extras) == set(("foo", "bar")) - assert hash(r1) == hash(r2) - assert hash(r1) == hash( - ( - "twisted", - None, - packaging.specifiers.SpecifierSet(">=1.2"), - frozenset(["foo", "bar"]), - None, - ) - ) - assert hash( - Requirement.parse("Twisted @ https://localhost/twisted.zip") - ) == hash( - ( - "twisted", - "https://localhost/twisted.zip", - packaging.specifiers.SpecifierSet(), - 
frozenset(), - None, - ) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - assert d("foo-0.3a4.egg") not in r1 - assert d("foo-0.3a1.egg") not in r1 - assert d("foo-0.3a4.egg") not in r2 - - assert d("foo-0.3a2.egg") in r1 - assert d("foo-0.3a2.egg") in r2 - assert d("foo-0.3a3.egg") in r2 - assert d("foo-0.3a5.egg") in r2 - - def testSetuptoolsProjectName(self): - """ - The setuptools project should implement the setuptools package. - """ - - assert Requirement.parse('setuptools').project_name == 'setuptools' - # setuptools 0.7 and higher means setuptools. - assert Requirement.parse('setuptools == 0.7').project_name == 'setuptools' - assert Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools' - assert Requirement.parse('setuptools >= 0.7').project_name == 'setuptools' - - -class TestParsing: - def testEmptyParse(self): - assert list(parse_requirements('')) == [] - - def testYielding(self): - for inp, out in [ - ([], []), - ('x', ['x']), - ([[]], []), - (' x\n y', ['x', 'y']), - (['x\n\n', 'y'], ['x', 'y']), - ]: - assert list(pkg_resources.yield_lines(inp)) == out - - def testSplitting(self): - sample = """ - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - assert list(pkg_resources.split_sections(sample)) == [ - (None, ["x"]), - ("Y", ["z", "a"]), - ("b", ["c"]), - ("d", []), - ("q", ["v"]), - ] - with pytest.raises(ValueError): - list(pkg_resources.split_sections("[foo")) - - def testSafeName(self): - assert safe_name("adns-python") == "adns-python" - assert safe_name("WSGI Utils") == "WSGI-Utils" - assert safe_name("WSGI Utils") == "WSGI-Utils" - assert safe_name("Money$$$Maker") == "Money-Maker" - assert safe_name("peak.web") != "peak-web" - - def testSafeVersion(self): - assert safe_version("1.2-1") == "1.2.post1" - assert safe_version("1.2 alpha") == "1.2.alpha" - assert safe_version("2.3.4 20050521") == "2.3.4.20050521" - 
assert safe_version("Money$$$Maker") == "Money-Maker" - assert safe_version("peak.web") == "peak.web" - - def testSimpleRequirements(self): - assert list(parse_requirements('Twis-Ted>=1.2-1')) == [ - Requirement('Twis-Ted>=1.2-1') - ] - assert list(parse_requirements('Twisted >=1.2, \\ # more\n<2.0')) == [ - Requirement('Twisted>=1.2,<2.0') - ] - assert Requirement.parse("FooBar==1.99a3") == Requirement("FooBar==1.99a3") - with pytest.raises(ValueError): - Requirement.parse(">=2.3") - with pytest.raises(ValueError): - Requirement.parse("x\\") - with pytest.raises(ValueError): - Requirement.parse("x==2 q") - with pytest.raises(ValueError): - Requirement.parse("X==1\nY==2") - with pytest.raises(ValueError): - Requirement.parse("#") - - def test_requirements_with_markers(self): - assert Requirement.parse("foobar;os_name=='a'") == Requirement.parse( - "foobar;os_name=='a'" - ) - assert Requirement.parse( - "name==1.1;python_version=='2.7'" - ) != Requirement.parse("name==1.1;python_version=='3.6'") - assert Requirement.parse( - "name==1.0;python_version=='2.7'" - ) != Requirement.parse("name==1.2;python_version=='2.7'") - assert Requirement.parse( - "name[foo]==1.0;python_version=='3.6'" - ) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'") - - def test_local_version(self): - (req,) = parse_requirements('foo==1.0+org1') - - def test_spaces_between_multiple_versions(self): - (req,) = parse_requirements('foo>=1.0, <3') - (req,) = parse_requirements('foo >= 1.0, < 3') - - @pytest.mark.parametrize( - ['lower', 'upper'], - [ - ('1.2-rc1', '1.2rc1'), - ('0.4', '0.4.0'), - ('0.4.0.0', '0.4.0'), - ('0.4.0-0', '0.4-0'), - ('0post1', '0.0post1'), - ('0pre1', '0.0c1'), - ('0.0.0preview1', '0c1'), - ('0.0c1', '0-rc1'), - ('1.2a1', '1.2.a.1'), - ('1.2.a', '1.2a'), - ], - ) - def testVersionEquality(self, lower, upper): - assert parse_version(lower) == parse_version(upper) - - torture = """ - 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1 - 0.79.9999+0.80.0pre2-3 
0.79.9999+0.80.0pre2-2 - 0.77.2-1 0.77.1-1 0.77.0-1 - """ - - @pytest.mark.parametrize( - ['lower', 'upper'], - [ - ('2.1', '2.1.1'), - ('2a1', '2b0'), - ('2a1', '2.1'), - ('2.3a1', '2.3'), - ('2.1-1', '2.1-2'), - ('2.1-1', '2.1.1'), - ('2.1', '2.1post4'), - ('2.1a0-20040501', '2.1'), - ('1.1', '02.1'), - ('3.2', '3.2.post0'), - ('3.2post1', '3.2post2'), - ('0.4', '4.0'), - ('0.0.4', '0.4.0'), - ('0post1', '0.4post1'), - ('2.1.0-rc1', '2.1.0'), - ('2.1dev', '2.1a0'), - ] - + list(pairwise(reversed(torture.split()))), - ) - def testVersionOrdering(self, lower, upper): - assert parse_version(lower) < parse_version(upper) - - def testVersionHashable(self): - """ - Ensure that our versions stay hashable even though we've subclassed - them and added some shim code to them. - """ - assert hash(parse_version("1.0")) == hash(parse_version("1.0")) - - -class TestNamespaces: - ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" - - @pytest.fixture - def symlinked_tmpdir(self, tmpdir): - """ - Where available, return the tempdir as a symlink, - which as revealed in #231 is more fragile than - a natural tempdir. - """ - if not hasattr(os, 'symlink'): - yield str(tmpdir) - return - - link_name = str(tmpdir) + '-linked' - os.symlink(str(tmpdir), link_name) - try: - yield type(tmpdir)(link_name) - finally: - os.unlink(link_name) - - @pytest.fixture(autouse=True) - def patched_path(self, tmpdir): - """ - Patch sys.path to include the 'site-pkgs' dir. Also - restore pkg_resources._namespace_packages to its - former state. 
- """ - saved_ns_pkgs = pkg_resources._namespace_packages.copy() - saved_sys_path = sys.path[:] - site_pkgs = tmpdir.mkdir('site-pkgs') - sys.path.append(str(site_pkgs)) - try: - yield - finally: - pkg_resources._namespace_packages = saved_ns_pkgs - sys.path = saved_sys_path - - issue591 = pytest.mark.xfail(platform.system() == 'Windows', reason="#591") - - @issue591 - def test_two_levels_deep(self, symlinked_tmpdir): - """ - Test nested namespace packages - Create namespace packages in the following tree : - site-packages-1/pkg1/pkg2 - site-packages-2/pkg1/pkg2 - Check both are in the _namespace_packages dict and that their __path__ - is correct - """ - real_tmpdir = symlinked_tmpdir.realpath() - tmpdir = symlinked_tmpdir - sys.path.append(str(tmpdir / 'site-pkgs2')) - site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2' - for site in site_dirs: - pkg1 = site / 'pkg1' - pkg2 = pkg1 / 'pkg2' - pkg2.ensure_dir() - (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8') - (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8') - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import pkg1 - assert "pkg1" in pkg_resources._namespace_packages - # attempt to import pkg2 from site-pkgs2 - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import pkg1.pkg2 - # check the _namespace_packages dict - assert "pkg1.pkg2" in pkg_resources._namespace_packages - assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] - # check the __path__ attribute contains both paths - expected = [ - str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"), - str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"), - ] - assert pkg1.pkg2.__path__ == expected - - @issue591 - def test_path_order(self, symlinked_tmpdir): - """ - Test that if multiple versions of the same namespace package subpackage - are on different sys.path entries, that only the one earliest on - sys.path is imported, and that the namespace 
package's __path__ is in - the correct order. - - Regression test for https://github.com/pypa/setuptools/issues/207 - """ - - tmpdir = symlinked_tmpdir - site_dirs = ( - tmpdir / "site-pkgs", - tmpdir / "site-pkgs2", - tmpdir / "site-pkgs3", - ) - - vers_str = "__version__ = %r" - - for number, site in enumerate(site_dirs, 1): - if number > 1: - sys.path.append(str(site)) - nspkg = site / 'nspkg' - subpkg = nspkg / 'subpkg' - subpkg.ensure_dir() - (nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8') - (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8') - - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import nspkg.subpkg - import nspkg - expected = [str(site.realpath() / 'nspkg') for site in site_dirs] - assert nspkg.__path__ == expected - assert nspkg.subpkg.__version__ == 1 +import os +import sys +import string +import platform +import itertools + +import pytest +from pkg_resources.extern import packaging # type: ignore[attr-defined] + +import pkg_resources +from pkg_resources import ( + parse_requirements, + VersionConflict, + parse_version, + Distribution, + EntryPoint, + Requirement, + safe_version, + safe_name, + WorkingSet, +) + + +# from Python 3.6 docs. +def pairwise(iterable): + "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
+ a, b = itertools.tee(iterable) + next(b, None) + return zip(a, b) + + +class Metadata(pkg_resources.EmptyProvider): + """Mock object to return metadata as if from an on-disk distribution""" + + def __init__(self, *pairs): + self.metadata = dict(pairs) + + def has_metadata(self, name): + return name in self.metadata + + def get_metadata(self, name): + return self.metadata[name] + + def get_metadata_lines(self, name): + return pkg_resources.yield_lines(self.get_metadata(name)) + + +dist_from_fn = pkg_resources.Distribution.from_filename + + +class TestDistro: + def testCollection(self): + # empty path should produce no distributions + ad = pkg_resources.Environment([], platform=None, python=None) + assert list(ad) == [] + assert ad['FooPkg'] == [] + ad.add(dist_from_fn("FooPkg-1.3_1.egg")) + ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) + ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) + + # Name is in there now + assert ad['FooPkg'] + # But only 1 package + assert list(ad) == ['foopkg'] + + # Distributions sort by version + expected = ['1.4', '1.3-1', '1.2'] + assert [dist.version for dist in ad['FooPkg']] == expected + + # Removing a distribution leaves sequence alone + ad.remove(ad['FooPkg'][1]) + assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2'] + + # And inserting adds them in order + ad.add(dist_from_fn("FooPkg-1.9.egg")) + assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2'] + + ws = WorkingSet([]) + foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") + foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") + (req,) = parse_requirements("FooPkg>=1.3") + + # Nominal case: no distros on path, should yield all applicable + assert ad.best_match(req, ws).version == '1.9' + # If a matching distro is already installed, should return only that + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + # If the first matching distro is unsuitable, it's a version conflict + ws = WorkingSet([]) + ws.add(foo12) + ws.add(foo14) + with 
pytest.raises(VersionConflict): + ad.best_match(req, ws) + + # If more than one match on the path, the first one takes precedence + ws = WorkingSet([]) + ws.add(foo14) + ws.add(foo12) + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + def checkFooPkg(self, d): + assert d.project_name == "FooPkg" + assert d.key == "foopkg" + assert d.version == "1.3.post1" + assert d.py_version == "2.4" + assert d.platform == "win32" + assert d.parsed_version == parse_version("1.3-1") + + def testDistroBasics(self): + d = Distribution( + "/some/path", + project_name="FooPkg", + version="1.3-1", + py_version="2.4", + platform="win32", + ) + self.checkFooPkg(d) + + d = Distribution("/some/path") + assert d.py_version == '{}.{}'.format(*sys.version_info) + assert d.platform is None + + def testDistroParse(self): + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg") + self.checkFooPkg(d) + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info") + self.checkFooPkg(d) + + def testDistroMetadata(self): + d = Distribution( + "/some/path", + project_name="FooPkg", + py_version="2.4", + platform="win32", + metadata=Metadata(('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")), + ) + self.checkFooPkg(d) + + def distRequires(self, txt): + return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) + + def checkRequires(self, dist, txt, extras=()): + assert list(dist.requires(extras)) == list(parse_requirements(txt)) + + def testDistroDependsSimple(self): + for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": + self.checkRequires(self.distRequires(v), v) + + needs_object_dir = pytest.mark.skipif( + not hasattr(object, '__dir__'), + reason='object.__dir__ necessary for self.__dir__ implementation', + ) + + def test_distribution_dir(self): + d = pkg_resources.Distribution() + dir(d) + + @needs_object_dir + def test_distribution_dir_includes_provider_dir(self): + d = pkg_resources.Distribution() + before = d.__dir__() + assert 'test_attr' not in before + 
d._provider.test_attr = None + after = d.__dir__() + assert len(after) == len(before) + 1 + assert 'test_attr' in after + + @needs_object_dir + def test_distribution_dir_ignores_provider_dir_leading_underscore(self): + d = pkg_resources.Distribution() + before = d.__dir__() + assert '_test_attr' not in before + d._provider._test_attr = None + after = d.__dir__() + assert len(after) == len(before) + assert '_test_attr' not in after + + def testResolve(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + # Resolving no requirements -> nothing to install + assert list(ws.resolve([], ad)) == [] + # Request something not in the collection -> DistributionNotFound + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo"), ad) + + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.egg", + metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")), + ) + ad.add(Foo) + ad.add(Distribution.from_filename("Foo-0.9.egg")) + + # Request thing(s) that are available -> list to activate + for i in range(3): + targets = list(ws.resolve(parse_requirements("Foo"), ad)) + assert targets == [Foo] + list(map(ws.add, targets)) + with pytest.raises(VersionConflict): + ws.resolve(parse_requirements("Foo==0.9"), ad) + ws = WorkingSet([]) # reset + + # Request an extra that causes an unresolved dependency for "Baz" + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo[bar]"), ad) + Baz = Distribution.from_filename( + "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) + ) + ad.add(Baz) + + # Activation list now includes resolved dependency + assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz] + # Requests for conflicting versions produce VersionConflict + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad) + + msg = 'Foo 0.9 is installed but Foo==1.2 is required' + assert vc.value.report() == msg + + def 
test_environment_marker_evaluation_negative(self): + """Environment markers are evaluated at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad) + assert list(res) == [] + + def test_environment_marker_evaluation_positive(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info") + ad.add(Foo) + res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad) + assert list(res) == [Foo] + + def test_environment_marker_evaluation_called(self): + """ + If one package foo requires bar without any extras, + markers should pass for bar without extras. + """ + (parent_req,) = parse_requirements("foo") + (req,) = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + (parent_req,) = parse_requirements("foo[]") + (req,) = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + def test_marker_evaluation_with_extras(self): + """Extras are also evaluated as markers at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", + ) + ), + ) + ad.add(Foo) + assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + res = list(ws.resolve(parse_requirements("Foo[baz]"), ad)) + assert res == [Foo, quux] + + def test_marker_evaluation_with_extras_normlized(self): + """Extras are also evaluated as markers at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + 
"/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz-lightyear\n" + "Requires-Dist: quux; extra=='baz-lightyear'", + ) + ), + ) + ad.add(Foo) + assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad)) + assert res == [Foo, quux] + + def test_marker_evaluation_with_multiple_extras(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz\n" + "Requires-Dist: quux; extra=='baz'\n" + "Provides-Extra: bar\n" + "Requires-Dist: fred; extra=='bar'\n", + ) + ), + ) + ad.add(Foo) + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info") + ad.add(fred) + res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad)) + assert sorted(res) == [fred, quux, Foo] + + def test_marker_evaluation_with_extras_loop(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + a = Distribution.from_filename( + "/foo_dir/a-0.2.dist-info", + metadata=Metadata(("METADATA", "Requires-Dist: c[a]")), + ) + b = Distribution.from_filename( + "/foo_dir/b-0.3.dist-info", + metadata=Metadata(("METADATA", "Requires-Dist: c[b]")), + ) + c = Distribution.from_filename( + "/foo_dir/c-1.0.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: a\n" + "Requires-Dist: b;extra=='a'\n" + "Provides-Extra: b\n" + "Requires-Dist: foo;extra=='b'", + ) + ), + ) + foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info") + for dist in (a, b, c, foo): + ad.add(dist) + res = list(ws.resolve(parse_requirements("a"), ad)) + assert res == [a, c, b, foo] + + @pytest.mark.xfail( + sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', + 
reason="https://github.com/python/cpython/issues/103632", + ) + def testDistroDependsOptions(self): + d = self.distRequires( + """ + Twisted>=1.5 + [docgen] + ZConfig>=2.0 + docutils>=0.3 + [fastcgi] + fcgiapp>=0.1""" + ) + self.checkRequires(d, "Twisted>=1.5") + self.checkRequires( + d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] + ) + self.checkRequires(d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]) + self.checkRequires( + d, + "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), + ["docgen", "fastcgi"], + ) + self.checkRequires( + d, + "Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), + ["fastcgi", "docgen"], + ) + with pytest.raises(pkg_resources.UnknownExtra): + d.requires(["foo"]) + + +class TestWorkingSet: + def test_find_conflicting(self): + ws = WorkingSet([]) + Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg") + ws.add(Foo) + + # create a requirement that conflicts with Foo 1.2 + req = next(parse_requirements("Foo<1.2")) + + with pytest.raises(VersionConflict) as vc: + ws.find(req) + + msg = 'Foo 1.2 is installed but Foo<1.2 is required' + assert vc.value.report() == msg + + def test_resolve_conflicts_with_prior(self): + """ + A ContextualVersionConflict should be raised when a requirement + conflicts with a prior requirement for a different package. + """ + # Create installation where Foo depends on Baz 1.0 and Bar depends on + # Baz 2.0. 
+ ws = WorkingSet([]) + md = Metadata(('depends.txt', "Baz==1.0")) + Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md) + ws.add(Foo) + md = Metadata(('depends.txt', "Baz==2.0")) + Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md) + ws.add(Bar) + Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg") + ws.add(Baz) + Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg") + ws.add(Baz) + + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo\nBar\n")) + + msg = "Baz 1.0 is installed but Baz==2.0 is required by " + msg += repr(set(['Bar'])) + assert vc.value.report() == msg + + +class TestEntryPoints: + def assertfields(self, ep): + assert ep.name == "foo" + assert ep.module_name == "pkg_resources.tests.test_resources" + assert ep.attrs == ("TestEntryPoints",) + assert ep.extras == ("x",) + assert ep.load() is TestEntryPoints + expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + assert str(ep) == expect + + def setup_method(self, method): + self.dist = Distribution.from_filename( + "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]')) + ) + + def testBasics(self): + ep = EntryPoint( + "foo", + "pkg_resources.tests.test_resources", + ["TestEntryPoints"], + ["x"], + self.dist, + ) + self.assertfields(ep) + + def testParse(self): + s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + ep = EntryPoint.parse(s, self.dist) + self.assertfields(ep) + + ep = EntryPoint.parse("bar baz= spammity[PING]") + assert ep.name == "bar baz" + assert ep.module_name == "spammity" + assert ep.attrs == () + assert ep.extras == ("ping",) + + ep = EntryPoint.parse(" fizzly = wocka:foo") + assert ep.name == "fizzly" + assert ep.module_name == "wocka" + assert ep.attrs == ("foo",) + assert ep.extras == () + + # plus in the name + spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer" + ep = EntryPoint.parse(spec) + assert ep.name == 'html+mako' + + reject_specs = "foo", 
"x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2" + + @pytest.mark.parametrize("reject_spec", reject_specs) + def test_reject_spec(self, reject_spec): + with pytest.raises(ValueError): + EntryPoint.parse(reject_spec) + + def test_printable_name(self): + """ + Allow any printable character in the name. + """ + # Create a name with all printable characters; strip the whitespace. + name = string.printable.strip() + spec = "{name} = module:attr".format(**locals()) + ep = EntryPoint.parse(spec) + assert ep.name == name + + def checkSubMap(self, m): + assert len(m) == len(self.submap_expect) + for key, ep in self.submap_expect.items(): + assert m.get(key).name == ep.name + assert m.get(key).module_name == ep.module_name + assert sorted(m.get(key).attrs) == sorted(ep.attrs) + assert sorted(m.get(key).extras) == sorted(ep.extras) + + submap_expect = dict( + feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), + feature2=EntryPoint( + 'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2'] + ), + feature3=EntryPoint('feature3', 'this.module', extras=['something']), + ) + submap_str = """ + # define features for blah blah + feature1 = somemodule:somefunction + feature2 = another.module:SomeClass [extra1,extra2] + feature3 = this.module [something] + """ + + def testParseList(self): + self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) + with pytest.raises(ValueError): + EntryPoint.parse_group("x a", "foo=bar") + with pytest.raises(ValueError): + EntryPoint.parse_group("x", ["foo=baz", "foo=bar"]) + + def testParseMap(self): + m = EntryPoint.parse_map({'xyz': self.submap_str}) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + m = EntryPoint.parse_map("[xyz]\n" + self.submap_str) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + with pytest.raises(ValueError): + EntryPoint.parse_map(["[xyz]", "[xyz]"]) + with pytest.raises(ValueError): + EntryPoint.parse_map(self.submap_str) + + def 
testDeprecationWarnings(self): + ep = EntryPoint( + "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], ["x"] + ) + with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning): + ep.load(require=False) + + +class TestRequirements: + def testBasics(self): + r = Requirement.parse("Twisted>=1.2") + assert str(r) == "Twisted>=1.2" + assert repr(r) == "Requirement.parse('Twisted>=1.2')" + assert r == Requirement("Twisted>=1.2") + assert r == Requirement("twisTed>=1.2") + assert r != Requirement("Twisted>=2.0") + assert r != Requirement("Zope>=1.2") + assert r != Requirement("Zope>=3.0") + assert r != Requirement("Twisted[extras]>=1.2") + + def testOrdering(self): + r1 = Requirement("Twisted==1.2c1,>=1.2") + r2 = Requirement("Twisted>=1.2,==1.2c1") + assert r1 == r2 + assert str(r1) == str(r2) + assert str(r2) == "Twisted==1.2c1,>=1.2" + assert Requirement("Twisted") != Requirement( + "Twisted @ https://localhost/twisted.zip" + ) + + def testBasicContains(self): + r = Requirement("Twisted>=1.2") + foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") + twist11 = Distribution.from_filename("Twisted-1.1.egg") + twist12 = Distribution.from_filename("Twisted-1.2.egg") + assert parse_version('1.2') in r + assert parse_version('1.1') not in r + assert '1.2' in r + assert '1.1' not in r + assert foo_dist not in r + assert twist11 not in r + assert twist12 in r + + def testOptionsAndHashing(self): + r1 = Requirement.parse("Twisted[foo,bar]>=1.2") + r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") + assert r1 == r2 + assert set(r1.extras) == set(("foo", "bar")) + assert set(r2.extras) == set(("foo", "bar")) + assert hash(r1) == hash(r2) + assert hash(r1) == hash( + ( + "twisted", + None, + packaging.specifiers.SpecifierSet(">=1.2"), + frozenset(["foo", "bar"]), + None, + ) + ) + assert hash( + Requirement.parse("Twisted @ https://localhost/twisted.zip") + ) == hash( + ( + "twisted", + "https://localhost/twisted.zip", + packaging.specifiers.SpecifierSet(), + 
frozenset(), + None, + ) + ) + + def testVersionEquality(self): + r1 = Requirement.parse("foo==0.3a2") + r2 = Requirement.parse("foo!=0.3a4") + d = Distribution.from_filename + + assert d("foo-0.3a4.egg") not in r1 + assert d("foo-0.3a1.egg") not in r1 + assert d("foo-0.3a4.egg") not in r2 + + assert d("foo-0.3a2.egg") in r1 + assert d("foo-0.3a2.egg") in r2 + assert d("foo-0.3a3.egg") in r2 + assert d("foo-0.3a5.egg") in r2 + + def testSetuptoolsProjectName(self): + """ + The setuptools project should implement the setuptools package. + """ + + assert Requirement.parse('setuptools').project_name == 'setuptools' + # setuptools 0.7 and higher means setuptools. + assert Requirement.parse('setuptools == 0.7').project_name == 'setuptools' + assert Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools' + assert Requirement.parse('setuptools >= 0.7').project_name == 'setuptools' + + +class TestParsing: + def testEmptyParse(self): + assert list(parse_requirements('')) == [] + + def testYielding(self): + for inp, out in [ + ([], []), + ('x', ['x']), + ([[]], []), + (' x\n y', ['x', 'y']), + (['x\n\n', 'y'], ['x', 'y']), + ]: + assert list(pkg_resources.yield_lines(inp)) == out + + def testSplitting(self): + sample = """ + x + [Y] + z + + a + [b ] + # foo + c + [ d] + [q] + v + """ + assert list(pkg_resources.split_sections(sample)) == [ + (None, ["x"]), + ("Y", ["z", "a"]), + ("b", ["c"]), + ("d", []), + ("q", ["v"]), + ] + with pytest.raises(ValueError): + list(pkg_resources.split_sections("[foo")) + + def testSafeName(self): + assert safe_name("adns-python") == "adns-python" + assert safe_name("WSGI Utils") == "WSGI-Utils" + assert safe_name("WSGI Utils") == "WSGI-Utils" + assert safe_name("Money$$$Maker") == "Money-Maker" + assert safe_name("peak.web") != "peak-web" + + def testSafeVersion(self): + assert safe_version("1.2-1") == "1.2.post1" + assert safe_version("1.2 alpha") == "1.2.alpha" + assert safe_version("2.3.4 20050521") == "2.3.4.20050521" + 
assert safe_version("Money$$$Maker") == "Money-Maker" + assert safe_version("peak.web") == "peak.web" + + def testSimpleRequirements(self): + assert list(parse_requirements('Twis-Ted>=1.2-1')) == [ + Requirement('Twis-Ted>=1.2-1') + ] + assert list(parse_requirements('Twisted >=1.2, \\ # more\n<2.0')) == [ + Requirement('Twisted>=1.2,<2.0') + ] + assert Requirement.parse("FooBar==1.99a3") == Requirement("FooBar==1.99a3") + with pytest.raises(ValueError): + Requirement.parse(">=2.3") + with pytest.raises(ValueError): + Requirement.parse("x\\") + with pytest.raises(ValueError): + Requirement.parse("x==2 q") + with pytest.raises(ValueError): + Requirement.parse("X==1\nY==2") + with pytest.raises(ValueError): + Requirement.parse("#") + + def test_requirements_with_markers(self): + assert Requirement.parse("foobar;os_name=='a'") == Requirement.parse( + "foobar;os_name=='a'" + ) + assert Requirement.parse( + "name==1.1;python_version=='2.7'" + ) != Requirement.parse("name==1.1;python_version=='3.6'") + assert Requirement.parse( + "name==1.0;python_version=='2.7'" + ) != Requirement.parse("name==1.2;python_version=='2.7'") + assert Requirement.parse( + "name[foo]==1.0;python_version=='3.6'" + ) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'") + + def test_local_version(self): + (req,) = parse_requirements('foo==1.0+org1') + + def test_spaces_between_multiple_versions(self): + (req,) = parse_requirements('foo>=1.0, <3') + (req,) = parse_requirements('foo >= 1.0, < 3') + + @pytest.mark.parametrize( + ['lower', 'upper'], + [ + ('1.2-rc1', '1.2rc1'), + ('0.4', '0.4.0'), + ('0.4.0.0', '0.4.0'), + ('0.4.0-0', '0.4-0'), + ('0post1', '0.0post1'), + ('0pre1', '0.0c1'), + ('0.0.0preview1', '0c1'), + ('0.0c1', '0-rc1'), + ('1.2a1', '1.2.a.1'), + ('1.2.a', '1.2a'), + ], + ) + def testVersionEquality(self, lower, upper): + assert parse_version(lower) == parse_version(upper) + + torture = """ + 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1 + 0.79.9999+0.80.0pre2-3 
0.79.9999+0.80.0pre2-2 + 0.77.2-1 0.77.1-1 0.77.0-1 + """ + + @pytest.mark.parametrize( + ['lower', 'upper'], + [ + ('2.1', '2.1.1'), + ('2a1', '2b0'), + ('2a1', '2.1'), + ('2.3a1', '2.3'), + ('2.1-1', '2.1-2'), + ('2.1-1', '2.1.1'), + ('2.1', '2.1post4'), + ('2.1a0-20040501', '2.1'), + ('1.1', '02.1'), + ('3.2', '3.2.post0'), + ('3.2post1', '3.2post2'), + ('0.4', '4.0'), + ('0.0.4', '0.4.0'), + ('0post1', '0.4post1'), + ('2.1.0-rc1', '2.1.0'), + ('2.1dev', '2.1a0'), + ] + + list(pairwise(reversed(torture.split()))), + ) + def testVersionOrdering(self, lower, upper): + assert parse_version(lower) < parse_version(upper) + + def testVersionHashable(self): + """ + Ensure that our versions stay hashable even though we've subclassed + them and added some shim code to them. + """ + assert hash(parse_version("1.0")) == hash(parse_version("1.0")) + + +class TestNamespaces: + ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" + + @pytest.fixture + def symlinked_tmpdir(self, tmpdir): + """ + Where available, return the tempdir as a symlink, + which as revealed in #231 is more fragile than + a natural tempdir. + """ + if not hasattr(os, 'symlink'): + yield str(tmpdir) + return + + link_name = str(tmpdir) + '-linked' + os.symlink(str(tmpdir), link_name) + try: + yield type(tmpdir)(link_name) + finally: + os.unlink(link_name) + + @pytest.fixture(autouse=True) + def patched_path(self, tmpdir): + """ + Patch sys.path to include the 'site-pkgs' dir. Also + restore pkg_resources._namespace_packages to its + former state. 
+ """ + saved_ns_pkgs = pkg_resources._namespace_packages.copy() + saved_sys_path = sys.path[:] + site_pkgs = tmpdir.mkdir('site-pkgs') + sys.path.append(str(site_pkgs)) + try: + yield + finally: + pkg_resources._namespace_packages = saved_ns_pkgs + sys.path = saved_sys_path + + issue591 = pytest.mark.xfail(platform.system() == 'Windows', reason="#591") + + @issue591 + def test_two_levels_deep(self, symlinked_tmpdir): + """ + Test nested namespace packages + Create namespace packages in the following tree : + site-packages-1/pkg1/pkg2 + site-packages-2/pkg1/pkg2 + Check both are in the _namespace_packages dict and that their __path__ + is correct + """ + real_tmpdir = symlinked_tmpdir.realpath() + tmpdir = symlinked_tmpdir + sys.path.append(str(tmpdir / 'site-pkgs2')) + site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2' + for site in site_dirs: + pkg1 = site / 'pkg1' + pkg2 = pkg1 / 'pkg2' + pkg2.ensure_dir() + (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8') + (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8') + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1 + assert "pkg1" in pkg_resources._namespace_packages + # attempt to import pkg2 from site-pkgs2 + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1.pkg2 + # check the _namespace_packages dict + assert "pkg1.pkg2" in pkg_resources._namespace_packages + assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] + # check the __path__ attribute contains both paths + expected = [ + str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"), + str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"), + ] + assert pkg1.pkg2.__path__ == expected + + @issue591 + def test_path_order(self, symlinked_tmpdir): + """ + Test that if multiple versions of the same namespace package subpackage + are on different sys.path entries, that only the one earliest on + sys.path is imported, and that the namespace 
package's __path__ is in + the correct order. + + Regression test for https://github.com/pypa/setuptools/issues/207 + """ + + tmpdir = symlinked_tmpdir + site_dirs = ( + tmpdir / "site-pkgs", + tmpdir / "site-pkgs2", + tmpdir / "site-pkgs3", + ) + + vers_str = "__version__ = %r" + + for number, site in enumerate(site_dirs, 1): + if number > 1: + sys.path.append(str(site)) + nspkg = site / 'nspkg' + subpkg = nspkg / 'subpkg' + subpkg.ensure_dir() + (nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8') + (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8') + + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import nspkg.subpkg + import nspkg + expected = [str(site.realpath() / 'nspkg') for site in site_dirs] + assert nspkg.__path__ == expected + assert nspkg.subpkg.__version__ == 1 diff --git a/pyproject.toml b/pyproject.toml index 9fe4bac237..eae729c05a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,23 +11,3 @@ extend_exclude = "_vendor" [tool.pytest-enabler.mypy] # disabled - - -[tool.mypy] -# CI should test for all versions, local development gets hints for oldest supported -python_version = "3.8" -strict = false -# TODO: Not all dependencies are typed. 
setuptools itself should be typed too -# TODO: Test environment is not yet properly configured to install all imported packages -ignore_missing_imports = true -# required to support namespace packages: https://github.com/python/mypy/issues/14057 -explicit_package_bases = true -exclude = [ - "^build/", - "^.tox/", - # Duplicate module name - "^pkg_resources/tests/data/my-test-package-source/setup.py$", - # Vendored - "^.+?/_vendor", - "^setuptools/_distutils", -] diff --git a/setup.cfg b/setup.cfg index f9d82a6abe..04050dce0c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -52,7 +52,7 @@ testing = pytest-cov; \ # coverage seems to make PyPy extremely slow python_implementation != "PyPy" - pytest-mypy >= 0.10.3; \ + pytest-mypy >= 0.9.1; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 2.2 diff --git a/setup.py b/setup.py index f05017e6f0..1a6074766a 100755 --- a/setup.py +++ b/setup.py @@ -88,6 +88,6 @@ def _restore_install_lib(self): if __name__ == '__main__': # allow setup.py to run from another directory - if here: - os.chdir(here) + # TODO: Use a proper conditonal statement here + here and os.chdir(here) # type: ignore[func-returns-value] dist = setuptools.setup(**setup_params) diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py index bd2b01e2b5..fadfd00e58 100644 --- a/setuptools/_importlib.py +++ b/setuptools/_importlib.py @@ -1,51 +1,51 @@ -import sys - - -def disable_importlib_metadata_finder(metadata): - """ - Ensure importlib_metadata doesn't provide older, incompatible - Distributions. - - Workaround for #3102. - """ - try: - import importlib_metadata - except ImportError: - return - except AttributeError: - from .warnings import SetuptoolsWarning - - SetuptoolsWarning.emit( - "Incompatibility problem.", - """ - `importlib-metadata` version is incompatible with `setuptools`. - This problem is likely to be solved by installing an updated version of - `importlib-metadata`. 
- """, - see_url="https://github.com/python/importlib_metadata/issues/396", - ) # Ensure a descriptive message is shown. - raise # This exception can be suppressed by _distutils_hack - - if importlib_metadata is metadata: - return - to_remove = [ - ob - for ob in sys.meta_path - if isinstance(ob, importlib_metadata.MetadataPathFinder) - ] - for item in to_remove: - sys.meta_path.remove(item) - - -if sys.version_info < (3, 10): - from setuptools.extern import importlib_metadata as metadata - - disable_importlib_metadata_finder(metadata) -else: - import importlib.metadata as metadata # noqa: F401 - - -if sys.version_info < (3, 9): - from setuptools.extern import importlib_resources as resources -else: - import importlib.resources as resources # noqa: F401 +import sys + + +def disable_importlib_metadata_finder(metadata): + """ + Ensure importlib_metadata doesn't provide older, incompatible + Distributions. + + Workaround for #3102. + """ + try: + import importlib_metadata + except ImportError: + return + except AttributeError: + from .warnings import SetuptoolsWarning + + SetuptoolsWarning.emit( + "Incompatibility problem.", + """ + `importlib-metadata` version is incompatible with `setuptools`. + This problem is likely to be solved by installing an updated version of + `importlib-metadata`. + """, + see_url="https://github.com/python/importlib_metadata/issues/396", + ) # Ensure a descriptive message is shown. 
+ raise # This exception can be suppressed by _distutils_hack + + if importlib_metadata is metadata: + return + to_remove = [ + ob + for ob in sys.meta_path + if isinstance(ob, importlib_metadata.MetadataPathFinder) + ] + for item in to_remove: + sys.meta_path.remove(item) + + +if sys.version_info < (3, 10): + from setuptools.extern import importlib_metadata as metadata # type: ignore[attr-defined] + + disable_importlib_metadata_finder(metadata) +else: + import importlib.metadata as metadata # noqa: F401 + + +if sys.version_info < (3, 9): + from setuptools.extern import importlib_resources as resources # type: ignore[attr-defined] +else: + import importlib.resources as resources # noqa: F401 diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index 3e94e662ef..9c7455a47d 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -1,125 +1,125 @@ -""" -Helpers for normalization as expected in wheel/sdist/module file names -and core metadata -""" -import re -from pathlib import Path -from typing import Union - -from .extern import packaging -from .warnings import SetuptoolsDeprecationWarning - -_Path = Union[str, Path] - -# https://packaging.python.org/en/latest/specifications/core-metadata/#name -_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) -_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) -_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) - - -def safe_identifier(name: str) -> str: - """Make a string safe to be used as Python identifier. - >>> safe_identifier("12abc") - '_12abc' - >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") - '__editable___myns_pkg_78_9_3_local' - """ - safe = re.sub(r'\W|^(?=\d)', '_', name) - assert safe.isidentifier() - return safe - - -def safe_name(component: str) -> str: - """Escape a component used as a project name according to Core Metadata. 
- >>> safe_name("hello world") - 'hello-world' - >>> safe_name("hello?world") - 'hello-world' - """ - # See pkg_resources.safe_name - return _UNSAFE_NAME_CHARS.sub("-", component) - - -def safe_version(version: str) -> str: - """Convert an arbitrary string into a valid version string. - >>> safe_version("1988 12 25") - '1988.12.25' - >>> safe_version("v0.2.1") - '0.2.1' - >>> safe_version("v0.2?beta") - '0.2b0' - >>> safe_version("v0.2 beta") - '0.2b0' - >>> safe_version("ubuntu lts") - Traceback (most recent call last): - ... - setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' - """ - v = version.replace(' ', '.') - try: - return str(packaging.version.Version(v)) - except packaging.version.InvalidVersion: - attempt = _UNSAFE_NAME_CHARS.sub("-", v) - return str(packaging.version.Version(attempt)) - - -def best_effort_version(version: str) -> str: - """Convert an arbitrary string into a version-like string. - >>> best_effort_version("v0.2 beta") - '0.2b0' - - >>> import warnings - >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) - >>> best_effort_version("ubuntu lts") - 'ubuntu.lts' - """ - # See pkg_resources.safe_version - try: - return safe_version(version) - except packaging.version.InvalidVersion: - SetuptoolsDeprecationWarning.emit( - f"Invalid version: {version!r}.", - f""" - Version {version!r} is not valid according to PEP 440. - - Please make sure to specify a valid version for your package. - Also note that future releases of setuptools may halt the build process - if an invalid version is given. 
- """, - see_url="https://peps.python.org/pep-0440/", - due_date=(2023, 9, 26), # See setuptools/dist _validate_version - ) - v = version.replace(' ', '.') - return safe_name(v) - - -def safe_extra(extra: str) -> str: - """Normalize extra name according to PEP 685 - >>> safe_extra("_FrIeNdLy-._.-bArD") - 'friendly-bard' - >>> safe_extra("FrIeNdLy-._.-bArD__._-") - 'friendly-bard' - """ - return _NON_ALPHANUMERIC.sub("-", extra).strip("-").lower() - - -def filename_component(value: str) -> str: - """Normalize each component of a filename (e.g. distribution/version part of wheel) - Note: ``value`` needs to be already normalized. - >>> filename_component("my-pkg") - 'my_pkg' - """ - return value.replace("-", "_").strip("_") - - -def safer_name(value: str) -> str: - """Like ``safe_name`` but can be used as filename component for wheel""" - # See bdist_wheel.safer_name - return filename_component(safe_name(value)) - - -def safer_best_effort_version(value: str) -> str: - """Like ``best_effort_version`` but can be used as filename component for wheel""" - # See bdist_wheel.safer_verion - # TODO: Replace with only safe_version in the future (no need for best effort) - return filename_component(best_effort_version(value)) +""" +Helpers for normalization as expected in wheel/sdist/module file names +and core metadata +""" +import re +from pathlib import Path +from typing import Union + +from .extern import packaging # type: ignore[attr-defined] +from .warnings import SetuptoolsDeprecationWarning + +_Path = Union[str, Path] + +# https://packaging.python.org/en/latest/specifications/core-metadata/#name +_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) +_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) +_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) + + +def safe_identifier(name: str) -> str: + """Make a string safe to be used as Python identifier. 
+ >>> safe_identifier("12abc") + '_12abc' + >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") + '__editable___myns_pkg_78_9_3_local' + """ + safe = re.sub(r'\W|^(?=\d)', '_', name) + assert safe.isidentifier() + return safe + + +def safe_name(component: str) -> str: + """Escape a component used as a project name according to Core Metadata. + >>> safe_name("hello world") + 'hello-world' + >>> safe_name("hello?world") + 'hello-world' + """ + # See pkg_resources.safe_name + return _UNSAFE_NAME_CHARS.sub("-", component) + + +def safe_version(version: str) -> str: + """Convert an arbitrary string into a valid version string. + >>> safe_version("1988 12 25") + '1988.12.25' + >>> safe_version("v0.2.1") + '0.2.1' + >>> safe_version("v0.2?beta") + '0.2b0' + >>> safe_version("v0.2 beta") + '0.2b0' + >>> safe_version("ubuntu lts") + Traceback (most recent call last): + ... + setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' + """ + v = version.replace(' ', '.') + try: + return str(packaging.version.Version(v)) + except packaging.version.InvalidVersion: + attempt = _UNSAFE_NAME_CHARS.sub("-", v) + return str(packaging.version.Version(attempt)) + + +def best_effort_version(version: str) -> str: + """Convert an arbitrary string into a version-like string. + >>> best_effort_version("v0.2 beta") + '0.2b0' + + >>> import warnings + >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) + >>> best_effort_version("ubuntu lts") + 'ubuntu.lts' + """ + # See pkg_resources.safe_version + try: + return safe_version(version) + except packaging.version.InvalidVersion: + SetuptoolsDeprecationWarning.emit( + f"Invalid version: {version!r}.", + f""" + Version {version!r} is not valid according to PEP 440. + + Please make sure to specify a valid version for your package. + Also note that future releases of setuptools may halt the build process + if an invalid version is given. 
+ """, + see_url="https://peps.python.org/pep-0440/", + due_date=(2023, 9, 26), # See setuptools/dist _validate_version + ) + v = version.replace(' ', '.') + return safe_name(v) + + +def safe_extra(extra: str) -> str: + """Normalize extra name according to PEP 685 + >>> safe_extra("_FrIeNdLy-._.-bArD") + 'friendly-bard' + >>> safe_extra("FrIeNdLy-._.-bArD__._-") + 'friendly-bard' + """ + return _NON_ALPHANUMERIC.sub("-", extra).strip("-").lower() + + +def filename_component(value: str) -> str: + """Normalize each component of a filename (e.g. distribution/version part of wheel) + Note: ``value`` needs to be already normalized. + >>> filename_component("my-pkg") + 'my_pkg' + """ + return value.replace("-", "_").strip("_") + + +def safer_name(value: str) -> str: + """Like ``safe_name`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_name + return filename_component(safe_name(value)) + + +def safer_best_effort_version(value: str) -> str: + """Like ``best_effort_version`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_verion + # TODO: Replace with only safe_version in the future (no need for best effort) + return filename_component(best_effort_version(value)) diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index a5199deb33..31463dc2b8 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -1,735 +1,735 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -from distutils.filelist import FileList as _FileList -from distutils.errors import DistutilsInternalError -from distutils.util import convert_path -from distutils import log -import distutils.errors -import distutils.filelist -import functools -import os -import re -import sys -import time -import collections - -from .._importlib import metadata -from .. import _entry_points, _normalization -from . 
import _requirestxt - -from setuptools import Command -from setuptools.command.sdist import sdist -from setuptools.command.sdist import walk_revctrl -from setuptools.command.setopt import edit_config -from setuptools.command import bdist_egg -import setuptools.unicode_utils as unicode_utils -from setuptools.glob import glob - -from setuptools.extern import packaging -from ..warnings import SetuptoolsDeprecationWarning - - -PY_MAJOR = '{}.{}'.format(*sys.version_info) - - -def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME - """ - Translate a file path glob like '*.txt' in to a regular expression. - This differs from fnmatch.translate which allows wildcards to match - directory separators. It also knows about '**/' which matches any number of - directories. - """ - pat = '' - - # This will split on '/' within [character classes]. This is deliberate. - chunks = glob.split(os.path.sep) - - sep = re.escape(os.sep) - valid_char = '[^%s]' % (sep,) - - for c, chunk in enumerate(chunks): - last_chunk = c == len(chunks) - 1 - - # Chunks that are a literal ** are globstars. They match anything. 
- if chunk == '**': - if last_chunk: - # Match anything if this is the last component - pat += '.*' - else: - # Match '(name/)*' - pat += '(?:%s+%s)*' % (valid_char, sep) - continue # Break here as the whole path component has been handled - - # Find any special characters in the remainder - i = 0 - chunk_len = len(chunk) - while i < chunk_len: - char = chunk[i] - if char == '*': - # Match any number of name characters - pat += valid_char + '*' - elif char == '?': - # Match a name character - pat += valid_char - elif char == '[': - # Character class - inner_i = i + 1 - # Skip initial !/] chars - if inner_i < chunk_len and chunk[inner_i] == '!': - inner_i = inner_i + 1 - if inner_i < chunk_len and chunk[inner_i] == ']': - inner_i = inner_i + 1 - - # Loop till the closing ] is found - while inner_i < chunk_len and chunk[inner_i] != ']': - inner_i = inner_i + 1 - - if inner_i >= chunk_len: - # Got to the end of the string without finding a closing ] - # Do not treat this as a matching group, but as a literal [ - pat += re.escape(char) - else: - # Grab the insides of the [brackets] - inner = chunk[i + 1 : inner_i] - char_class = '' - - # Class negation - if inner[0] == '!': - char_class = '^' - inner = inner[1:] - - char_class += re.escape(inner) - pat += '[%s]' % (char_class,) - - # Skip to the end ] - i = inner_i - else: - pat += re.escape(char) - i += 1 - - # Join each chunk with the dir separator - if not last_chunk: - pat += sep - - pat += r'\Z' - return re.compile(pat, flags=re.MULTILINE | re.DOTALL) - - -class InfoCommon: - tag_build = None - tag_date = None - - @property - def name(self): - return _normalization.safe_name(self.distribution.get_name()) - - def tagged_version(self): - tagged = self._maybe_tag(self.distribution.get_version()) - return _normalization.best_effort_version(tagged) - - def _maybe_tag(self, version): - """ - egg_info may be called more than once for a distribution, - in which case the version string already contains all tags. 
- """ - return ( - version - if self.vtags and self._already_tagged(version) - else version + self.vtags - ) - - def _already_tagged(self, version: str) -> bool: - # Depending on their format, tags may change with version normalization. - # So in addition the regular tags, we have to search for the normalized ones. - return version.endswith(self.vtags) or version.endswith(self._safe_tags()) - - def _safe_tags(self) -> str: - # To implement this we can rely on `safe_version` pretending to be version 0 - # followed by tags. Then we simply discard the starting 0 (fake version number) - return _normalization.best_effort_version(f"0{self.vtags}")[1:] - - def tags(self) -> str: - version = '' - if self.tag_build: - version += self.tag_build - if self.tag_date: - version += time.strftime("%Y%m%d") - return version - - vtags = property(tags) - - -class egg_info(InfoCommon, Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ( - 'egg-base=', - 'e', - "directory containing .egg-info directories" - " (default: top of the source tree)", - ), - ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date'] - negative_opt = { - 'no-date': 'tag-date', - } - - def initialize_options(self): - self.egg_base = None - self.egg_name = None - self.egg_info = None - self.egg_version = None - self.ignore_egg_info_in_manifest = False - - #################################### - # allow the 'tag_svn_revision' to be detected and - # set, supporting sdists built on older Setuptools. - @property - def tag_svn_revision(self): - pass - - @tag_svn_revision.setter - def tag_svn_revision(self, value): - pass - - #################################### - - def save_version_info(self, filename): - """ - Materialize the value of date into the - build tag. 
Install build keys in a deterministic order - to avoid arbitrary reordering on subsequent builds. - """ - egg_info = collections.OrderedDict() - # follow the order these keys would have been added - # when PYTHONHASHSEED=0 - egg_info['tag_build'] = self.tags() - egg_info['tag_date'] = 0 - edit_config(filename, dict(egg_info=egg_info)) - - def finalize_options(self): - # Note: we need to capture the current value returned - # by `self.tagged_version()`, so we can later update - # `self.distribution.metadata.version` without - # repercussions. - self.egg_name = self.name - self.egg_version = self.tagged_version() - parsed_version = packaging.version.Version(self.egg_version) - - try: - is_version = isinstance(parsed_version, packaging.version.Version) - spec = "%s==%s" if is_version else "%s===%s" - packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version)) - except ValueError as e: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" - % (self.egg_name, self.egg_version) - ) from e - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('', os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) 
- # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - key = getattr(pd, "key", None) or getattr(pd, "name", None) - if pd is not None and key == self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = packaging.version.Version(self.egg_version) - self.distribution._patched_dist = None - - def _get_egg_basename(self, py_version=PY_MAJOR, platform=None): - """Compute filename of the output egg. Private API.""" - return _egg_basename(self.egg_name, self.egg_version, py_version, platform) - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn("%s not set in setup(), but %s exists", what, filename) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def run(self): - self.mkpath(self.egg_info) - try: - os.utime(self.egg_info, None) - except OSError as e: - msg = f"Cannot update time stamp of directory '{self.egg_info}'" - raise distutils.errors.DistutilsFileError(msg) from e - for ep in metadata.entry_points(group='egg_info.writers'): - writer = ep.load() - writer(self, ep.name, os.path.join(self.egg_info, ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - -class FileList(_FileList): - # Implementations of the various MANIFEST.in commands - - def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False): - super().__init__(warn, debug_print) - self.ignore_egg_info_dir = ignore_egg_info_dir - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). 
- (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - action_map = { - 'include': self.include, - 'exclude': self.exclude, - 'global-include': self.global_include, - 'global-exclude': self.global_exclude, - 'recursive-include': functools.partial( - self.recursive_include, - dir, - ), - 'recursive-exclude': functools.partial( - self.recursive_exclude, - dir, - ), - 'graft': self.graft, - 'prune': self.prune, - } - log_map = { - 'include': "warning: no files found matching '%s'", - 'exclude': ("warning: no previously-included files found " "matching '%s'"), - 'global-include': ( - "warning: no files found matching '%s' " "anywhere in distribution" - ), - 'global-exclude': ( - "warning: no previously-included files matching " - "'%s' found anywhere in distribution" - ), - 'recursive-include': ( - "warning: no files found matching '%s' " "under directory '%s'" - ), - 'recursive-exclude': ( - "warning: no previously-included files matching " - "'%s' found under directory '%s'" - ), - 'graft': "warning: no directories found matching '%s'", - 'prune': "no previously-included directories found matching '%s'", - } - - try: - process_action = action_map[action] - except KeyError: - raise DistutilsInternalError( - "this cannot happen: invalid action '{action!s}'".format(action=action), - ) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. 
- - action_is_recursive = action.startswith('recursive-') - if action in {'graft', 'prune'}: - patterns = [dir_pattern] - extra_log_args = (dir,) if action_is_recursive else () - log_tmpl = log_map[action] - - self.debug_print( - ' '.join( - [action] + ([dir] if action_is_recursive else []) + patterns, - ) - ) - for pattern in patterns: - if not process_action(pattern): - log.warn(log_tmpl, pattern, *extra_log_args) - - def _remove_files(self, predicate): - """ - Remove all files from the file list that match the predicate. - Return True if any matching files were removed - """ - found = False - for i in range(len(self.files) - 1, -1, -1): - if predicate(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - found = True - return found - - def include(self, pattern): - """Include files that match 'pattern'.""" - found = [f for f in glob(pattern) if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def exclude(self, pattern): - """Exclude files that match 'pattern'.""" - match = translate_pattern(pattern) - return self._remove_files(match.match) - - def recursive_include(self, dir, pattern): - """ - Include all files anywhere in 'dir/' that match the pattern. - """ - full_pattern = os.path.join(dir, '**', pattern) - found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def recursive_exclude(self, dir, pattern): - """ - Exclude any file anywhere in 'dir/' that match the pattern. 
- """ - match = translate_pattern(os.path.join(dir, '**', pattern)) - return self._remove_files(match.match) - - def graft(self, dir): - """Include all files from 'dir/'.""" - found = [ - item - for match_dir in glob(dir) - for item in distutils.filelist.findall(match_dir) - ] - self.extend(found) - return bool(found) - - def prune(self, dir): - """Filter out files from 'dir/'.""" - match = translate_pattern(os.path.join(dir, '**')) - return self._remove_files(match.match) - - def global_include(self, pattern): - """ - Include all files anywhere in the current directory that match the - pattern. This is very inefficient on large file trees. - """ - if self.allfiles is None: - self.findall() - match = translate_pattern(os.path.join('**', pattern)) - found = [f for f in self.allfiles if match.match(f)] - self.extend(found) - return bool(found) - - def global_exclude(self, pattern): - """ - Exclude all files anywhere that match the pattern. - """ - match = translate_pattern(os.path.join('**', pattern)) - return self._remove_files(match.match) - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if self._safe_path(path): - self.files.append(path) - - def extend(self, paths): - self.files.extend(filter(self._safe_path, paths)) - - def _repair(self): - """ - Replace self.files with only safe paths - - Because some owners of FileList manipulate the underlying - ``files`` attribute directly, this method must be called to - repair those paths. 
- """ - self.files = list(filter(self._safe_path, self.files)) - - def _safe_path(self, path): - enc_warn = "'%s' not %s encodable -- skipping" - - # To avoid accidental trans-codings errors, first to unicode - u_path = unicode_utils.filesys_decode(path) - if u_path is None: - log.warn("'%s' in unexpected encoding -- skipping" % path) - return False - - # Must ensure utf-8 encodability - utf8_path = unicode_utils.try_encode(u_path, "utf-8") - if utf8_path is None: - log.warn(enc_warn, path, 'utf-8') - return False - - try: - # ignore egg-info paths - is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path - if self.ignore_egg_info_dir and is_egg_info: - return False - # accept is either way checks out - if os.path.exists(u_path) or os.path.exists(utf8_path): - return True - # this will catch any encode errors decoding u_path - except UnicodeEncodeError: - log.warn(enc_warn, path, sys.getfilesystemencoding()) - - -class manifest_maker(sdist): - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - self.ignore_egg_info_dir = False - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir) - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.add_license_files() - self._add_referenced_files() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def _manifest_normalize(self, path): - path = unicode_utils.filesys_decode(path) - return path.replace(os.sep, '/') - - def write_manifest(self): - """ - Write the file list in 'self.filelist' to the manifest file - named by 'self.manifest'. 
- """ - self.filelist._repair() - - # Now _repairs should encodability, but not unicode - files = [self._manifest_normalize(f) for f in self.filelist.files] - msg = "writing manifest file '%s'" % self.manifest - self.execute(write_file, (self.manifest, files), msg) - - def warn(self, msg): - if not self._should_suppress_warning(msg): - sdist.warn(self, msg) - - @staticmethod - def _should_suppress_warning(msg): - """ - suppress missing-file warnings from sdist - """ - return re.match(r"standard file .*not found", msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - - if os.path.exists("setup.py"): - # setup.py should be included by default, even if it's not - # the script called to create the sdist - self.filelist.append("setup.py") - - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.graft(ei_cmd.egg_info) - - def add_license_files(self): - license_files = self.distribution.metadata.license_files or [] - for lf in license_files: - log.info("adding license file '%s'", lf) - self.filelist.extend(license_files) - - def _add_referenced_files(self): - """Add files referenced by the config (e.g. 
`file:` directive) to filelist""" - referenced = getattr(self.distribution, '_referenced_files', []) - # ^-- fallback if dist comes from distutils or is a custom class - for rf in referenced: - log.debug("adding file referenced by config '%s'", rf) - self.filelist.extend(referenced) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.prune(build.build_base) - self.filelist.prune(base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern( - r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1 - ) - - def _safe_data_files(self, build_py): - """ - The parent class implementation of this method - (``sdist``) will try to include data files, which - might cause recursion problems when - ``include_package_data=True``. - - Therefore, avoid triggering any attempt of - analyzing/building the manifest again. - """ - if hasattr(build_py, 'get_data_files_without_manifest'): - return build_py.get_data_files_without_manifest() - - SetuptoolsDeprecationWarning.emit( - "`build_py` command does not inherit from setuptools' `build_py`.", - """ - Custom 'build_py' does not implement 'get_data_files_without_manifest'. - Please extend command classes from setuptools instead of distutils. - """, - see_url="https://peps.python.org/pep-0632/", - # due_date not defined yet, old projects might still do it? - ) - return build_py.get_data_files() - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. 
- """ - contents = "\n".join(contents) - - # assuming the contents has been vetted for utf-8 encoding - contents = contents.encode("utf-8") - - with open(filename, "wb") as f: # always write POSIX-style manifest - f.write(contents) - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution, 'zip_safe', None) - - bdist_egg.write_safety_flag(cmd.egg_info, safe) - - -def warn_depends_obsolete(cmd, basename, filename): - """ - Unused: left to avoid errors when updating (from source) from <= 67.8. - Old installations have a .dist-info directory with the entry-point - ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``. - This may trigger errors when running the first egg_info in build_meta. - TODO: Remove this function in a version sufficiently > 68. 
- """ - - -# Export API used in entry_points -write_requirements = _requirestxt.write_requirements -write_setup_requirements = _requirestxt.write_setup_requirements - - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()] - ) - cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value) + '\n' - cmd.write_or_delete_file(argname, filename, value, force) - - -def write_entries(cmd, basename, filename): - eps = _entry_points.load(cmd.distribution.entry_points) - defn = _entry_points.render(eps) - cmd.write_or_delete_file('entry points', filename, defn, True) - - -def _egg_basename(egg_name, egg_version, py_version=None, platform=None): - """Compute filename of the output egg. Private API.""" - name = _normalization.filename_component(egg_name) - version = _normalization.filename_component(egg_version) - egg = f"{name}-{version}-py{py_version or PY_MAJOR}" - if platform: - egg += f"-{platform}" - return egg - - -class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): - """Deprecated behavior warning for EggInfo, bypassing suppression.""" +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +from distutils.filelist import FileList as _FileList +from distutils.errors import DistutilsInternalError +from distutils.util import convert_path +from distutils import log +import distutils.errors +import distutils.filelist +import functools +import os +import re +import sys +import time +import collections + +from .._importlib import metadata +from .. import _entry_points, _normalization +from . 
import _requirestxt + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.command.sdist import walk_revctrl +from setuptools.command.setopt import edit_config +from setuptools.command import bdist_egg +import setuptools.unicode_utils as unicode_utils +from setuptools.glob import glob + +from setuptools.extern import packaging # type: ignore[attr-defined] +from ..warnings import SetuptoolsDeprecationWarning + + +PY_MAJOR = '{}.{}'.format(*sys.version_info) + + +def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME + """ + Translate a file path glob like '*.txt' in to a regular expression. + This differs from fnmatch.translate which allows wildcards to match + directory separators. It also knows about '**/' which matches any number of + directories. + """ + pat = '' + + # This will split on '/' within [character classes]. This is deliberate. + chunks = glob.split(os.path.sep) + + sep = re.escape(os.sep) + valid_char = '[^%s]' % (sep,) + + for c, chunk in enumerate(chunks): + last_chunk = c == len(chunks) - 1 + + # Chunks that are a literal ** are globstars. They match anything. 
+ if chunk == '**': + if last_chunk: + # Match anything if this is the last component + pat += '.*' + else: + # Match '(name/)*' + pat += '(?:%s+%s)*' % (valid_char, sep) + continue # Break here as the whole path component has been handled + + # Find any special characters in the remainder + i = 0 + chunk_len = len(chunk) + while i < chunk_len: + char = chunk[i] + if char == '*': + # Match any number of name characters + pat += valid_char + '*' + elif char == '?': + # Match a name character + pat += valid_char + elif char == '[': + # Character class + inner_i = i + 1 + # Skip initial !/] chars + if inner_i < chunk_len and chunk[inner_i] == '!': + inner_i = inner_i + 1 + if inner_i < chunk_len and chunk[inner_i] == ']': + inner_i = inner_i + 1 + + # Loop till the closing ] is found + while inner_i < chunk_len and chunk[inner_i] != ']': + inner_i = inner_i + 1 + + if inner_i >= chunk_len: + # Got to the end of the string without finding a closing ] + # Do not treat this as a matching group, but as a literal [ + pat += re.escape(char) + else: + # Grab the insides of the [brackets] + inner = chunk[i + 1 : inner_i] + char_class = '' + + # Class negation + if inner[0] == '!': + char_class = '^' + inner = inner[1:] + + char_class += re.escape(inner) + pat += '[%s]' % (char_class,) + + # Skip to the end ] + i = inner_i + else: + pat += re.escape(char) + i += 1 + + # Join each chunk with the dir separator + if not last_chunk: + pat += sep + + pat += r'\Z' + return re.compile(pat, flags=re.MULTILINE | re.DOTALL) + + +class InfoCommon: + tag_build = None + tag_date = None + + @property + def name(self): + return _normalization.safe_name(self.distribution.get_name()) + + def tagged_version(self): + tagged = self._maybe_tag(self.distribution.get_version()) + return _normalization.best_effort_version(tagged) + + def _maybe_tag(self, version): + """ + egg_info may be called more than once for a distribution, + in which case the version string already contains all tags. 
+ """ + return ( + version + if self.vtags and self._already_tagged(version) + else version + self.vtags + ) + + def _already_tagged(self, version: str) -> bool: + # Depending on their format, tags may change with version normalization. + # So in addition the regular tags, we have to search for the normalized ones. + return version.endswith(self.vtags) or version.endswith(self._safe_tags()) + + def _safe_tags(self) -> str: + # To implement this we can rely on `safe_version` pretending to be version 0 + # followed by tags. Then we simply discard the starting 0 (fake version number) + return _normalization.best_effort_version(f"0{self.vtags}")[1:] + + def tags(self) -> str: + version = '' + if self.tag_build: + version += self.tag_build + if self.tag_date: + version += time.strftime("%Y%m%d") + return version + + vtags = property(tags) + + +class egg_info(InfoCommon, Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ( + 'egg-base=', + 'e', + "directory containing .egg-info directories" + " (default: top of the source tree)", + ), + ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date'] + negative_opt = { + 'no-date': 'tag-date', + } + + def initialize_options(self): + self.egg_base = None + self.egg_name = None + self.egg_info = None + self.egg_version = None + self.ignore_egg_info_in_manifest = False + + #################################### + # allow the 'tag_svn_revision' to be detected and + # set, supporting sdists built on older Setuptools. + @property + def tag_svn_revision(self): + pass + + @tag_svn_revision.setter + def tag_svn_revision(self, value): + pass + + #################################### + + def save_version_info(self, filename): + """ + Materialize the value of date into the + build tag. 
Install build keys in a deterministic order + to avoid arbitrary reordering on subsequent builds. + """ + egg_info = collections.OrderedDict() + # follow the order these keys would have been added + # when PYTHONHASHSEED=0 + egg_info['tag_build'] = self.tags() + egg_info['tag_date'] = 0 + edit_config(filename, dict(egg_info=egg_info)) + + def finalize_options(self): + # Note: we need to capture the current value returned + # by `self.tagged_version()`, so we can later update + # `self.distribution.metadata.version` without + # repercussions. + self.egg_name = self.name + self.egg_version = self.tagged_version() + parsed_version = packaging.version.Version(self.egg_version) + + try: + is_version = isinstance(parsed_version, packaging.version.Version) + spec = "%s==%s" if is_version else "%s===%s" + packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version)) + except ValueError as e: + raise distutils.errors.DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" + % (self.egg_name, self.egg_version) + ) from e + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('', os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) 
+ # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + key = getattr(pd, "key", None) or getattr(pd, "name", None) + if pd is not None and key == self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = packaging.version.Version(self.egg_version) + self.distribution._patched_dist = None + + def _get_egg_basename(self, py_version=PY_MAJOR, platform=None): + """Compute filename of the output egg. Private API.""" + return _egg_basename(self.egg_name, self.egg_version, py_version, platform) + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn("%s not set in setup(), but %s exists", what, filename) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. 
+ """ + log.info("writing %s to %s", what, filename) + data = data.encode("utf-8") + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def run(self): + self.mkpath(self.egg_info) + try: + os.utime(self.egg_info, None) + except OSError as e: + msg = f"Cannot update time stamp of directory '{self.egg_info}'" + raise distutils.errors.DistutilsFileError(msg) from e + for ep in metadata.entry_points(group='egg_info.writers'): + writer = ep.load() + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + +class FileList(_FileList): + # Implementations of the various MANIFEST.in commands + + def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False): + super().__init__(warn, debug_print) + self.ignore_egg_info_dir = ignore_egg_info_dir + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). 
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + action_map = { + 'include': self.include, + 'exclude': self.exclude, + 'global-include': self.global_include, + 'global-exclude': self.global_exclude, + 'recursive-include': functools.partial( + self.recursive_include, + dir, + ), + 'recursive-exclude': functools.partial( + self.recursive_exclude, + dir, + ), + 'graft': self.graft, + 'prune': self.prune, + } + log_map = { + 'include': "warning: no files found matching '%s'", + 'exclude': ("warning: no previously-included files found " "matching '%s'"), + 'global-include': ( + "warning: no files found matching '%s' " "anywhere in distribution" + ), + 'global-exclude': ( + "warning: no previously-included files matching " + "'%s' found anywhere in distribution" + ), + 'recursive-include': ( + "warning: no files found matching '%s' " "under directory '%s'" + ), + 'recursive-exclude': ( + "warning: no previously-included files matching " + "'%s' found under directory '%s'" + ), + 'graft': "warning: no directories found matching '%s'", + 'prune': "no previously-included directories found matching '%s'", + } + + try: + process_action = action_map[action] + except KeyError: + raise DistutilsInternalError( + "this cannot happen: invalid action '{action!s}'".format(action=action), + ) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. 
+ + action_is_recursive = action.startswith('recursive-') + if action in {'graft', 'prune'}: + patterns = [dir_pattern] + extra_log_args = (dir,) if action_is_recursive else () + log_tmpl = log_map[action] + + self.debug_print( + ' '.join( + [action] + ([dir] if action_is_recursive else []) + patterns, + ) + ) + for pattern in patterns: + if not process_action(pattern): + log.warn(log_tmpl, pattern, *extra_log_args) + + def _remove_files(self, predicate): + """ + Remove all files from the file list that match the predicate. + Return True if any matching files were removed + """ + found = False + for i in range(len(self.files) - 1, -1, -1): + if predicate(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + found = True + return found + + def include(self, pattern): + """Include files that match 'pattern'.""" + found = [f for f in glob(pattern) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def exclude(self, pattern): + """Exclude files that match 'pattern'.""" + match = translate_pattern(pattern) + return self._remove_files(match.match) + + def recursive_include(self, dir, pattern): + """ + Include all files anywhere in 'dir/' that match the pattern. + """ + full_pattern = os.path.join(dir, '**', pattern) + found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def recursive_exclude(self, dir, pattern): + """ + Exclude any file anywhere in 'dir/' that match the pattern. 
+ """ + match = translate_pattern(os.path.join(dir, '**', pattern)) + return self._remove_files(match.match) + + def graft(self, dir): + """Include all files from 'dir/'.""" + found = [ + item + for match_dir in glob(dir) + for item in distutils.filelist.findall(match_dir) + ] + self.extend(found) + return bool(found) + + def prune(self, dir): + """Filter out files from 'dir/'.""" + match = translate_pattern(os.path.join(dir, '**')) + return self._remove_files(match.match) + + def global_include(self, pattern): + """ + Include all files anywhere in the current directory that match the + pattern. This is very inefficient on large file trees. + """ + if self.allfiles is None: + self.findall() + match = translate_pattern(os.path.join('**', pattern)) + found = [f for f in self.allfiles if match.match(f)] + self.extend(found) + return bool(found) + + def global_exclude(self, pattern): + """ + Exclude all files anywhere that match the pattern. + """ + match = translate_pattern(os.path.join('**', pattern)) + return self._remove_files(match.match) + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. 
+ """ + self.files = list(filter(self._safe_path, self.files)) + + def _safe_path(self, path): + enc_warn = "'%s' not %s encodable -- skipping" + + # To avoid accidental trans-codings errors, first to unicode + u_path = unicode_utils.filesys_decode(path) + if u_path is None: + log.warn("'%s' in unexpected encoding -- skipping" % path) + return False + + # Must ensure utf-8 encodability + utf8_path = unicode_utils.try_encode(u_path, "utf-8") + if utf8_path is None: + log.warn(enc_warn, path, 'utf-8') + return False + + try: + # ignore egg-info paths + is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path + if self.ignore_egg_info_dir and is_egg_info: + return False + # accept is either way checks out + if os.path.exists(u_path) or os.path.exists(utf8_path): + return True + # this will catch any encode errors decoding u_path + except UnicodeEncodeError: + log.warn(enc_warn, path, sys.getfilesystemencoding()) + + +class manifest_maker(sdist): + template = "MANIFEST.in" + + def initialize_options(self): + self.use_defaults = 1 + self.prune = 1 + self.manifest_only = 1 + self.force_manifest = 1 + self.ignore_egg_info_dir = False + + def finalize_options(self): + pass + + def run(self): + self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir) + if not os.path.exists(self.manifest): + self.write_manifest() # it must exist so it'll get in the list + self.add_defaults() + if os.path.exists(self.template): + self.read_template() + self.add_license_files() + self._add_referenced_files() + self.prune_file_list() + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def _manifest_normalize(self, path): + path = unicode_utils.filesys_decode(path) + return path.replace(os.sep, '/') + + def write_manifest(self): + """ + Write the file list in 'self.filelist' to the manifest file + named by 'self.manifest'. 
+ """ + self.filelist._repair() + + # Now _repairs should encodability, but not unicode + files = [self._manifest_normalize(f) for f in self.filelist.files] + msg = "writing manifest file '%s'" % self.manifest + self.execute(write_file, (self.manifest, files), msg) + + def warn(self, msg): + if not self._should_suppress_warning(msg): + sdist.warn(self, msg) + + @staticmethod + def _should_suppress_warning(msg): + """ + suppress missing-file warnings from sdist + """ + return re.match(r"standard file .*not found", msg) + + def add_defaults(self): + sdist.add_defaults(self) + self.filelist.append(self.template) + self.filelist.append(self.manifest) + rcfiles = list(walk_revctrl()) + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + + if os.path.exists("setup.py"): + # setup.py should be included by default, even if it's not + # the script called to create the sdist + self.filelist.append("setup.py") + + ei_cmd = self.get_finalized_command('egg_info') + self.filelist.graft(ei_cmd.egg_info) + + def add_license_files(self): + license_files = self.distribution.metadata.license_files or [] + for lf in license_files: + log.info("adding license file '%s'", lf) + self.filelist.extend(license_files) + + def _add_referenced_files(self): + """Add files referenced by the config (e.g. 
`file:` directive) to filelist""" + referenced = getattr(self.distribution, '_referenced_files', []) + # ^-- fallback if dist comes from distutils or is a custom class + for rf in referenced: + log.debug("adding file referenced by config '%s'", rf) + self.filelist.extend(referenced) + + def prune_file_list(self): + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + self.filelist.prune(build.build_base) + self.filelist.prune(base_dir) + sep = re.escape(os.sep) + self.filelist.exclude_pattern( + r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1 + ) + + def _safe_data_files(self, build_py): + """ + The parent class implementation of this method + (``sdist``) will try to include data files, which + might cause recursion problems when + ``include_package_data=True``. + + Therefore, avoid triggering any attempt of + analyzing/building the manifest again. + """ + if hasattr(build_py, 'get_data_files_without_manifest'): + return build_py.get_data_files_without_manifest() + + SetuptoolsDeprecationWarning.emit( + "`build_py` command does not inherit from setuptools' `build_py`.", + """ + Custom 'build_py' does not implement 'get_data_files_without_manifest'. + Please extend command classes from setuptools instead of distutils. + """, + see_url="https://peps.python.org/pep-0632/", + # due_date not defined yet, old projects might still do it? + ) + return build_py.get_data_files() + + +def write_file(filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. 
+ """ + contents = "\n".join(contents) + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution, 'zip_safe', None) + + bdist_egg.write_safety_flag(cmd.egg_info, safe) + + +def warn_depends_obsolete(cmd, basename, filename): + """ + Unused: left to avoid errors when updating (from source) from <= 67.8. + Old installations have a .dist-info directory with the entry-point + ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``. + This may trigger errors when running the first egg_info in build_meta. + TODO: Remove this function in a version sufficiently > 68. 
+ """ + + +# Export API used in entry_points +write_requirements = _requirestxt.write_requirements +write_setup_requirements = _requirestxt.write_setup_requirements + + +def write_toplevel_names(cmd, basename, filename): + pkgs = dict.fromkeys( + [k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()] + ) + cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') + + +def overwrite_arg(cmd, basename, filename): + write_arg(cmd, basename, filename, True) + + +def write_arg(cmd, basename, filename, force=False): + argname = os.path.splitext(basename)[0] + value = getattr(cmd.distribution, argname, None) + if value is not None: + value = '\n'.join(value) + '\n' + cmd.write_or_delete_file(argname, filename, value, force) + + +def write_entries(cmd, basename, filename): + eps = _entry_points.load(cmd.distribution.entry_points) + defn = _entry_points.render(eps) + cmd.write_or_delete_file('entry points', filename, defn, True) + + +def _egg_basename(egg_name, egg_version, py_version=None, platform=None): + """Compute filename of the output egg. 
Private API.""" + name = _normalization.filename_component(egg_name) + version = _normalization.filename_component(egg_version) + egg = f"{name}-{version}-py{py_version or PY_MAJOR}" + if platform: + egg += f"-{platform}" + return egg + + +class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): + """Deprecated behavior warning for EggInfo, bypassing suppression.""" diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index 8ccf73be98..67c4a4552f 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -1,6 +1,5 @@ import importlib.util import sys -from typing import TYPE_CHECKING class VendorImporter: @@ -83,15 +82,3 @@ def install(self): 'tomli', ) VendorImporter(__name__, names, 'setuptools._vendor').install() - -if TYPE_CHECKING: - import packaging as packaging - import packaging.version - import ordered_set as ordered_set - import more_itertools as more_itertools - import importlib_metadata as importlib_metadata - import zipp as zipp - import importlib_resources as importlib_resources - import jaraco as jaraco - import typing_extensions as typing_extensions - import tomli as tomli diff --git a/setuptools/extern/packaging/__init__.pyi b/setuptools/extern/packaging/__init__.pyi deleted file mode 100644 index 77224d3269..0000000000 --- a/setuptools/extern/packaging/__init__.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging import * diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 10428b359c..ee38de756d 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -21,22 +21,21 @@ import itertools import subprocess import distutils.errors -import sys from setuptools.extern.more_itertools import unique_everseen -if sys.platform == "win32": +if platform.system() == 'Windows': import winreg from os import environ else: # Mock winreg and environ so the module can be imported on this platform. 
- class winreg: + class winreg: # type: ignore[no-redef] # https://github.com/python/mypy/issues/8166 HKEY_USERS = None HKEY_CURRENT_USER = None HKEY_LOCAL_MACHINE = None HKEY_CLASSES_ROOT = None - environ = dict() + environ = dict() # type: ignore[assignment] # https://github.com/python/mypy/issues/8166 def _msvc14_find_vc2015(): @@ -408,10 +407,10 @@ class RegistryInfo: """ HKEYS = ( - winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT, + winreg.HKEY_USERS, # type: ignore[attr-defined] + winreg.HKEY_CURRENT_USER, # type: ignore[attr-defined] + winreg.HKEY_LOCAL_MACHINE, # type: ignore[attr-defined] + winreg.HKEY_CLASSES_ROOT, # type: ignore[attr-defined] ) def __init__(self, platform_info): diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index abf4a383b0..9babcc5a36 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -264,52 +264,6 @@ def run_setup(setup_script, args): # Normal exit, just return -def _mk_dual_path_wrapper(name): - original = getattr(_os, name) - - def wrap(self: AbstractSandbox, src, dst, *args, **kw): - if self._active: - src, dst = self._remap_pair(name, src, dst, *args, **kw) - return original(src, dst, *args, **kw) - - return wrap - - -def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os, name) - - def wrap(self: AbstractSandbox, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return original(path, *args, **kw) - - return wrap - - -def _mk_single_with_return(name): - original = getattr(_os, name) - - def wrap(self: AbstractSandbox, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return self._remap_output(name, original(path, *args, **kw)) - return original(path, *args, **kw) - - return wrap - - -def _mk_query(name): - original = getattr(_os, name) - - def wrap(self: AbstractSandbox, *args, **kw): - retval = original(*args, **kw) - if 
self._active: - return self._remap_output(name, retval) - return retval - - return wrap - - class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" @@ -345,10 +299,30 @@ def run(self, func): with self: return func() + def _mk_dual_path_wrapper(name: str): # type: ignore[misc] # TODO: Extract or make static + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): + if self._active: + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + + return wrap + for name in ["rename", "link", "symlink"]: if hasattr(_os, name): locals()[name] = _mk_dual_path_wrapper(name) + def _mk_single_path_wrapper(name: str, original=None): # type: ignore[misc] # TODO: Extract or make static + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + + return wrap + if _file: _file = _mk_single_path_wrapper('file', _file) _open = _mk_single_path_wrapper('open', _open) @@ -376,10 +350,32 @@ def run(self, func): if hasattr(_os, name): locals()[name] = _mk_single_path_wrapper(name) + def _mk_single_with_return(name: str): # type: ignore[misc] # TODO: Extract or make static + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + + return wrap + for name in ['readlink', 'tempnam']: if hasattr(_os, name): locals()[name] = _mk_single_with_return(name) + def _mk_query(name: str): # type: ignore[misc] # TODO: Extract or make static + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) + if self._active: + return self._remap_output(name, retval) + return retval + + return wrap + for name in ['getcwd', 'tmpnam']: if hasattr(_os, name): 
locals()[name] = _mk_query(name) diff --git a/setuptools/tests/_packaging_compat.py b/setuptools/tests/_packaging_compat.py index 7538ba5e07..5d48634ed8 100644 --- a/setuptools/tests/_packaging_compat.py +++ b/setuptools/tests/_packaging_compat.py @@ -1,6 +1,8 @@ +from typing import TYPE_CHECKING + from packaging import __version__ as packaging_version -if tuple(packaging_version.split(".")) >= ("23", "2"): +if TYPE_CHECKING or tuple(packaging_version.split(".")) >= ("23", "2"): from packaging.metadata import Metadata # type: ignore[attr-defined] else: # Just pretend it exists while waiting for release... diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index f5a4872e92..94b94159d8 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -1,3 +1,5 @@ +# https://github.com/python/mypy/issues/8009#issuecomment-558335186 +# mypy: disable-error-code="has-type" """Integration tests for setuptools that focus on building packages via pip. The idea behind these tests is not to exhaustively check all the possible @@ -17,7 +19,6 @@ from enum import Enum from glob import glob from hashlib import md5 -from typing_extensions import reveal_type from urllib.request import urlopen import pytest @@ -25,15 +26,10 @@ from .helpers import Archive, run - pytestmark = pytest.mark.integration -class v(Enum): - LATEST = 1 - - -(LATEST,) = v +(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/8009#issuecomment-558335186 """Default version to be checked""" # There are positive and negative aspects of checking the latest version of the # packages. 
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 918767e347..85c5043500 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE', False), + os.environ.get('PYTHONDONTWRITEBYTECODE'), # type: ignore[arg-type] # https://github.com/pytest-dev/pytest/issues/10094 reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): From 03afa45a96021487014008c76dc093b6c01461da Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 22 Oct 2023 18:52:15 -0400 Subject: [PATCH 06/25] Fix accidental line ending changes --- pkg_resources/__init__.py | 5 +- pkg_resources/tests/test_resources.py | 1762 ++++++++++++------------- setuptools/_importlib.py | 102 +- setuptools/_normalization.py | 250 ++-- setuptools/command/egg_info.py | 1470 ++++++++++----------- 5 files changed, 1795 insertions(+), 1794 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 03bfad16f7..cc9f604128 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2397,8 +2397,9 @@ def file_ns_handler(importer, path_item, packageName, module): register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -if importlib_machinery: - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +# TODO: If importlib_machinery import fails, this will also fail. This should be fixed. 
+# https://github.com/pypa/setuptools/pull/3979/files#r1367959803 +register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index c42ac1231c..465292fab5 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -1,881 +1,881 @@ -import os -import sys -import string -import platform -import itertools - -import pytest -from pkg_resources.extern import packaging # type: ignore[attr-defined] - -import pkg_resources -from pkg_resources import ( - parse_requirements, - VersionConflict, - parse_version, - Distribution, - EntryPoint, - Requirement, - safe_version, - safe_name, - WorkingSet, -) - - -# from Python 3.6 docs. -def pairwise(iterable): - "s -> (s0,s1), (s1,s2), (s2, s3), ..." - a, b = itertools.tee(iterable) - next(b, None) - return zip(a, b) - - -class Metadata(pkg_resources.EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self, *pairs): - self.metadata = dict(pairs) - - def has_metadata(self, name): - return name in self.metadata - - def get_metadata(self, name): - return self.metadata[name] - - def get_metadata_lines(self, name): - return pkg_resources.yield_lines(self.get_metadata(name)) - - -dist_from_fn = pkg_resources.Distribution.from_filename - - -class TestDistro: - def testCollection(self): - # empty path should produce no distributions - ad = pkg_resources.Environment([], platform=None, python=None) - assert list(ad) == [] - assert ad['FooPkg'] == [] - ad.add(dist_from_fn("FooPkg-1.3_1.egg")) - ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) - ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - assert ad['FooPkg'] - # But only 1 package - assert list(ad) == ['foopkg'] - - # Distributions sort by version - expected = ['1.4', '1.3-1', '1.2'] - assert 
[dist.version for dist in ad['FooPkg']] == expected - - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2'] - - # And inserting adds them in order - ad.add(dist_from_fn("FooPkg-1.9.egg")) - assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2'] - - ws = WorkingSet([]) - foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") - foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") - (req,) = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - assert ad.best_match(req, ws).version == '1.9' - # If a matching distro is already installed, should return only that - ws.add(foo14) - assert ad.best_match(req, ws).version == '1.4' - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]) - ws.add(foo12) - ws.add(foo14) - with pytest.raises(VersionConflict): - ad.best_match(req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]) - ws.add(foo14) - ws.add(foo12) - ws.add(foo14) - assert ad.best_match(req, ws).version == '1.4' - - def checkFooPkg(self, d): - assert d.project_name == "FooPkg" - assert d.key == "foopkg" - assert d.version == "1.3.post1" - assert d.py_version == "2.4" - assert d.platform == "win32" - assert d.parsed_version == parse_version("1.3-1") - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg", - version="1.3-1", - py_version="2.4", - platform="win32", - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - assert d.py_version == '{}.{}'.format(*sys.version_info) - assert d.platform is None - - def testDistroParse(self): - d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg") - self.checkFooPkg(d) - d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", - project_name="FooPkg", - 
py_version="2.4", - platform="win32", - metadata=Metadata(('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")), - ) - self.checkFooPkg(d) - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - assert list(dist.requires(extras)) == list(parse_requirements(txt)) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - needs_object_dir = pytest.mark.skipif( - not hasattr(object, '__dir__'), - reason='object.__dir__ necessary for self.__dir__ implementation', - ) - - def test_distribution_dir(self): - d = pkg_resources.Distribution() - dir(d) - - @needs_object_dir - def test_distribution_dir_includes_provider_dir(self): - d = pkg_resources.Distribution() - before = d.__dir__() - assert 'test_attr' not in before - d._provider.test_attr = None - after = d.__dir__() - assert len(after) == len(before) + 1 - assert 'test_attr' in after - - @needs_object_dir - def test_distribution_dir_ignores_provider_dir_leading_underscore(self): - d = pkg_resources.Distribution() - before = d.__dir__() - assert '_test_attr' not in before - d._provider._test_attr = None - after = d.__dir__() - assert len(after) == len(before) - assert '_test_attr' not in after - - def testResolve(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - assert list(ws.resolve([], ad)) == [] - # Request something not in the collection -> DistributionNotFound - with pytest.raises(pkg_resources.DistributionNotFound): - ws.resolve(parse_requirements("Foo"), ad) - - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")), - ) - ad.add(Foo) - ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = 
list(ws.resolve(parse_requirements("Foo"), ad)) - assert targets == [Foo] - list(map(ws.add, targets)) - with pytest.raises(VersionConflict): - ws.resolve(parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - with pytest.raises(pkg_resources.DistributionNotFound): - ws.resolve(parse_requirements("Foo[bar]"), ad) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz] - # Requests for conflicting versions produce VersionConflict - with pytest.raises(VersionConflict) as vc: - ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad) - - msg = 'Foo 0.9 is installed but Foo==1.2 is required' - assert vc.value.report() == msg - - def test_environment_marker_evaluation_negative(self): - """Environment markers are evaluated at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad) - assert list(res) == [] - - def test_environment_marker_evaluation_positive(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info") - ad.add(Foo) - res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad) - assert list(res) == [Foo] - - def test_environment_marker_evaluation_called(self): - """ - If one package foo requires bar without any extras, - markers should pass for bar without extras. 
- """ - (parent_req,) = parse_requirements("foo") - (req,) = parse_requirements("bar;python_version>='2'") - req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) - assert req_extras.markers_pass(req) - - (parent_req,) = parse_requirements("foo[]") - (req,) = parse_requirements("bar;python_version>='2'") - req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) - assert req_extras.markers_pass(req) - - def test_marker_evaluation_with_extras(self): - """Extras are also evaluated as markers at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", - ) - ), - ) - ad.add(Foo) - assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - res = list(ws.resolve(parse_requirements("Foo[baz]"), ad)) - assert res == [Foo, quux] - - def test_marker_evaluation_with_extras_normlized(self): - """Extras are also evaluated as markers at resolution time.""" - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz-lightyear\n" - "Requires-Dist: quux; extra=='baz-lightyear'", - ) - ), - ) - ad.add(Foo) - assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad)) - assert res == [Foo, quux] - - def test_marker_evaluation_with_multiple_extras(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" - "Requires-Dist: quux; extra=='baz'\n" - "Provides-Extra: bar\n" - 
"Requires-Dist: fred; extra=='bar'\n", - ) - ), - ) - ad.add(Foo) - quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") - ad.add(quux) - fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info") - ad.add(fred) - res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad)) - assert sorted(res) == [fred, quux, Foo] - - def test_marker_evaluation_with_extras_loop(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - a = Distribution.from_filename( - "/foo_dir/a-0.2.dist-info", - metadata=Metadata(("METADATA", "Requires-Dist: c[a]")), - ) - b = Distribution.from_filename( - "/foo_dir/b-0.3.dist-info", - metadata=Metadata(("METADATA", "Requires-Dist: c[b]")), - ) - c = Distribution.from_filename( - "/foo_dir/c-1.0.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: a\n" - "Requires-Dist: b;extra=='a'\n" - "Provides-Extra: b\n" - "Requires-Dist: foo;extra=='b'", - ) - ), - ) - foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info") - for dist in (a, b, c, foo): - ad.add(dist) - res = list(ws.resolve(parse_requirements("a"), ad)) - assert res == [a, c, b, foo] - - @pytest.mark.xfail( - sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', - reason="https://github.com/python/cpython/issues/103632", - ) - def testDistroDependsOptions(self): - d = self.distRequires( - """ - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""" - ) - self.checkRequires(d, "Twisted>=1.5") - self.checkRequires( - d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires(d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]) - self.checkRequires( - d, - "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen", "fastcgi"], - ) - self.checkRequires( - d, - "Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"], - ) - with pytest.raises(pkg_resources.UnknownExtra): - d.requires(["foo"]) - - -class 
TestWorkingSet: - def test_find_conflicting(self): - ws = WorkingSet([]) - Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg") - ws.add(Foo) - - # create a requirement that conflicts with Foo 1.2 - req = next(parse_requirements("Foo<1.2")) - - with pytest.raises(VersionConflict) as vc: - ws.find(req) - - msg = 'Foo 1.2 is installed but Foo<1.2 is required' - assert vc.value.report() == msg - - def test_resolve_conflicts_with_prior(self): - """ - A ContextualVersionConflict should be raised when a requirement - conflicts with a prior requirement for a different package. - """ - # Create installation where Foo depends on Baz 1.0 and Bar depends on - # Baz 2.0. - ws = WorkingSet([]) - md = Metadata(('depends.txt', "Baz==1.0")) - Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md) - ws.add(Foo) - md = Metadata(('depends.txt', "Baz==2.0")) - Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md) - ws.add(Bar) - Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg") - ws.add(Baz) - Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg") - ws.add(Baz) - - with pytest.raises(VersionConflict) as vc: - ws.resolve(parse_requirements("Foo\nBar\n")) - - msg = "Baz 1.0 is installed but Baz==2.0 is required by " - msg += repr(set(['Bar'])) - assert vc.value.report() == msg - - -class TestEntryPoints: - def assertfields(self, ep): - assert ep.name == "foo" - assert ep.module_name == "pkg_resources.tests.test_resources" - assert ep.attrs == ("TestEntryPoints",) - assert ep.extras == ("x",) - assert ep.load() is TestEntryPoints - expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" - assert str(ep) == expect - - def setup_method(self, method): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]')) - ) - - def testBasics(self): - ep = EntryPoint( - "foo", - "pkg_resources.tests.test_resources", - ["TestEntryPoints"], - ["x"], - self.dist, - ) - self.assertfields(ep) - - 
def testParse(self): - s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - assert ep.name == "bar baz" - assert ep.module_name == "spammity" - assert ep.attrs == () - assert ep.extras == ("ping",) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - assert ep.name == "fizzly" - assert ep.module_name == "wocka" - assert ep.attrs == ("foo",) - assert ep.extras == () - - # plus in the name - spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer" - ep = EntryPoint.parse(spec) - assert ep.name == 'html+mako' - - reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2" - - @pytest.mark.parametrize("reject_spec", reject_specs) - def test_reject_spec(self, reject_spec): - with pytest.raises(ValueError): - EntryPoint.parse(reject_spec) - - def test_printable_name(self): - """ - Allow any printable character in the name. - """ - # Create a name with all printable characters; strip the whitespace. 
- name = string.printable.strip() - spec = "{name} = module:attr".format(**locals()) - ep = EntryPoint.parse(spec) - assert ep.name == name - - def checkSubMap(self, m): - assert len(m) == len(self.submap_expect) - for key, ep in self.submap_expect.items(): - assert m.get(key).name == ep.name - assert m.get(key).module_name == ep.module_name - assert sorted(m.get(key).attrs) == sorted(ep.attrs) - assert sorted(m.get(key).extras) == sorted(ep.extras) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint( - 'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2'] - ), - feature3=EntryPoint('feature3', 'this.module', extras=['something']), - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - with pytest.raises(ValueError): - EntryPoint.parse_group("x a", "foo=bar") - with pytest.raises(ValueError): - EntryPoint.parse_group("x", ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz': self.submap_str}) - self.checkSubMap(m['xyz']) - assert list(m.keys()) == ['xyz'] - m = EntryPoint.parse_map("[xyz]\n" + self.submap_str) - self.checkSubMap(m['xyz']) - assert list(m.keys()) == ['xyz'] - with pytest.raises(ValueError): - EntryPoint.parse_map(["[xyz]", "[xyz]"]) - with pytest.raises(ValueError): - EntryPoint.parse_map(self.submap_str) - - def testDeprecationWarnings(self): - ep = EntryPoint( - "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], ["x"] - ) - with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning): - ep.load(require=False) - - -class TestRequirements: - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - assert str(r) == "Twisted>=1.2" - assert repr(r) == "Requirement.parse('Twisted>=1.2')" - 
assert r == Requirement("Twisted>=1.2") - assert r == Requirement("twisTed>=1.2") - assert r != Requirement("Twisted>=2.0") - assert r != Requirement("Zope>=1.2") - assert r != Requirement("Zope>=3.0") - assert r != Requirement("Twisted[extras]>=1.2") - - def testOrdering(self): - r1 = Requirement("Twisted==1.2c1,>=1.2") - r2 = Requirement("Twisted>=1.2,==1.2c1") - assert r1 == r2 - assert str(r1) == str(r2) - assert str(r2) == "Twisted==1.2c1,>=1.2" - assert Requirement("Twisted") != Requirement( - "Twisted @ https://localhost/twisted.zip" - ) - - def testBasicContains(self): - r = Requirement("Twisted>=1.2") - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - assert parse_version('1.2') in r - assert parse_version('1.1') not in r - assert '1.2' in r - assert '1.1' not in r - assert foo_dist not in r - assert twist11 not in r - assert twist12 in r - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - assert r1 == r2 - assert set(r1.extras) == set(("foo", "bar")) - assert set(r2.extras) == set(("foo", "bar")) - assert hash(r1) == hash(r2) - assert hash(r1) == hash( - ( - "twisted", - None, - packaging.specifiers.SpecifierSet(">=1.2"), - frozenset(["foo", "bar"]), - None, - ) - ) - assert hash( - Requirement.parse("Twisted @ https://localhost/twisted.zip") - ) == hash( - ( - "twisted", - "https://localhost/twisted.zip", - packaging.specifiers.SpecifierSet(), - frozenset(), - None, - ) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - assert d("foo-0.3a4.egg") not in r1 - assert d("foo-0.3a1.egg") not in r1 - assert d("foo-0.3a4.egg") not in r2 - - assert d("foo-0.3a2.egg") in r1 - assert d("foo-0.3a2.egg") in r2 - assert d("foo-0.3a3.egg") in r2 - assert 
d("foo-0.3a5.egg") in r2 - - def testSetuptoolsProjectName(self): - """ - The setuptools project should implement the setuptools package. - """ - - assert Requirement.parse('setuptools').project_name == 'setuptools' - # setuptools 0.7 and higher means setuptools. - assert Requirement.parse('setuptools == 0.7').project_name == 'setuptools' - assert Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools' - assert Requirement.parse('setuptools >= 0.7').project_name == 'setuptools' - - -class TestParsing: - def testEmptyParse(self): - assert list(parse_requirements('')) == [] - - def testYielding(self): - for inp, out in [ - ([], []), - ('x', ['x']), - ([[]], []), - (' x\n y', ['x', 'y']), - (['x\n\n', 'y'], ['x', 'y']), - ]: - assert list(pkg_resources.yield_lines(inp)) == out - - def testSplitting(self): - sample = """ - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - assert list(pkg_resources.split_sections(sample)) == [ - (None, ["x"]), - ("Y", ["z", "a"]), - ("b", ["c"]), - ("d", []), - ("q", ["v"]), - ] - with pytest.raises(ValueError): - list(pkg_resources.split_sections("[foo")) - - def testSafeName(self): - assert safe_name("adns-python") == "adns-python" - assert safe_name("WSGI Utils") == "WSGI-Utils" - assert safe_name("WSGI Utils") == "WSGI-Utils" - assert safe_name("Money$$$Maker") == "Money-Maker" - assert safe_name("peak.web") != "peak-web" - - def testSafeVersion(self): - assert safe_version("1.2-1") == "1.2.post1" - assert safe_version("1.2 alpha") == "1.2.alpha" - assert safe_version("2.3.4 20050521") == "2.3.4.20050521" - assert safe_version("Money$$$Maker") == "Money-Maker" - assert safe_version("peak.web") == "peak.web" - - def testSimpleRequirements(self): - assert list(parse_requirements('Twis-Ted>=1.2-1')) == [ - Requirement('Twis-Ted>=1.2-1') - ] - assert list(parse_requirements('Twisted >=1.2, \\ # more\n<2.0')) == [ - Requirement('Twisted>=1.2,<2.0') - ] - assert Requirement.parse("FooBar==1.99a3") == 
Requirement("FooBar==1.99a3") - with pytest.raises(ValueError): - Requirement.parse(">=2.3") - with pytest.raises(ValueError): - Requirement.parse("x\\") - with pytest.raises(ValueError): - Requirement.parse("x==2 q") - with pytest.raises(ValueError): - Requirement.parse("X==1\nY==2") - with pytest.raises(ValueError): - Requirement.parse("#") - - def test_requirements_with_markers(self): - assert Requirement.parse("foobar;os_name=='a'") == Requirement.parse( - "foobar;os_name=='a'" - ) - assert Requirement.parse( - "name==1.1;python_version=='2.7'" - ) != Requirement.parse("name==1.1;python_version=='3.6'") - assert Requirement.parse( - "name==1.0;python_version=='2.7'" - ) != Requirement.parse("name==1.2;python_version=='2.7'") - assert Requirement.parse( - "name[foo]==1.0;python_version=='3.6'" - ) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'") - - def test_local_version(self): - (req,) = parse_requirements('foo==1.0+org1') - - def test_spaces_between_multiple_versions(self): - (req,) = parse_requirements('foo>=1.0, <3') - (req,) = parse_requirements('foo >= 1.0, < 3') - - @pytest.mark.parametrize( - ['lower', 'upper'], - [ - ('1.2-rc1', '1.2rc1'), - ('0.4', '0.4.0'), - ('0.4.0.0', '0.4.0'), - ('0.4.0-0', '0.4-0'), - ('0post1', '0.0post1'), - ('0pre1', '0.0c1'), - ('0.0.0preview1', '0c1'), - ('0.0c1', '0-rc1'), - ('1.2a1', '1.2.a.1'), - ('1.2.a', '1.2a'), - ], - ) - def testVersionEquality(self, lower, upper): - assert parse_version(lower) == parse_version(upper) - - torture = """ - 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1 - 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2 - 0.77.2-1 0.77.1-1 0.77.0-1 - """ - - @pytest.mark.parametrize( - ['lower', 'upper'], - [ - ('2.1', '2.1.1'), - ('2a1', '2b0'), - ('2a1', '2.1'), - ('2.3a1', '2.3'), - ('2.1-1', '2.1-2'), - ('2.1-1', '2.1.1'), - ('2.1', '2.1post4'), - ('2.1a0-20040501', '2.1'), - ('1.1', '02.1'), - ('3.2', '3.2.post0'), - ('3.2post1', '3.2post2'), - ('0.4', '4.0'), - ('0.0.4', '0.4.0'), 
- ('0post1', '0.4post1'), - ('2.1.0-rc1', '2.1.0'), - ('2.1dev', '2.1a0'), - ] - + list(pairwise(reversed(torture.split()))), - ) - def testVersionOrdering(self, lower, upper): - assert parse_version(lower) < parse_version(upper) - - def testVersionHashable(self): - """ - Ensure that our versions stay hashable even though we've subclassed - them and added some shim code to them. - """ - assert hash(parse_version("1.0")) == hash(parse_version("1.0")) - - -class TestNamespaces: - ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" - - @pytest.fixture - def symlinked_tmpdir(self, tmpdir): - """ - Where available, return the tempdir as a symlink, - which as revealed in #231 is more fragile than - a natural tempdir. - """ - if not hasattr(os, 'symlink'): - yield str(tmpdir) - return - - link_name = str(tmpdir) + '-linked' - os.symlink(str(tmpdir), link_name) - try: - yield type(tmpdir)(link_name) - finally: - os.unlink(link_name) - - @pytest.fixture(autouse=True) - def patched_path(self, tmpdir): - """ - Patch sys.path to include the 'site-pkgs' dir. Also - restore pkg_resources._namespace_packages to its - former state. 
- """ - saved_ns_pkgs = pkg_resources._namespace_packages.copy() - saved_sys_path = sys.path[:] - site_pkgs = tmpdir.mkdir('site-pkgs') - sys.path.append(str(site_pkgs)) - try: - yield - finally: - pkg_resources._namespace_packages = saved_ns_pkgs - sys.path = saved_sys_path - - issue591 = pytest.mark.xfail(platform.system() == 'Windows', reason="#591") - - @issue591 - def test_two_levels_deep(self, symlinked_tmpdir): - """ - Test nested namespace packages - Create namespace packages in the following tree : - site-packages-1/pkg1/pkg2 - site-packages-2/pkg1/pkg2 - Check both are in the _namespace_packages dict and that their __path__ - is correct - """ - real_tmpdir = symlinked_tmpdir.realpath() - tmpdir = symlinked_tmpdir - sys.path.append(str(tmpdir / 'site-pkgs2')) - site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2' - for site in site_dirs: - pkg1 = site / 'pkg1' - pkg2 = pkg1 / 'pkg2' - pkg2.ensure_dir() - (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8') - (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8') - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import pkg1 - assert "pkg1" in pkg_resources._namespace_packages - # attempt to import pkg2 from site-pkgs2 - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import pkg1.pkg2 - # check the _namespace_packages dict - assert "pkg1.pkg2" in pkg_resources._namespace_packages - assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] - # check the __path__ attribute contains both paths - expected = [ - str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"), - str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"), - ] - assert pkg1.pkg2.__path__ == expected - - @issue591 - def test_path_order(self, symlinked_tmpdir): - """ - Test that if multiple versions of the same namespace package subpackage - are on different sys.path entries, that only the one earliest on - sys.path is imported, and that the namespace 
package's __path__ is in - the correct order. - - Regression test for https://github.com/pypa/setuptools/issues/207 - """ - - tmpdir = symlinked_tmpdir - site_dirs = ( - tmpdir / "site-pkgs", - tmpdir / "site-pkgs2", - tmpdir / "site-pkgs3", - ) - - vers_str = "__version__ = %r" - - for number, site in enumerate(site_dirs, 1): - if number > 1: - sys.path.append(str(site)) - nspkg = site / 'nspkg' - subpkg = nspkg / 'subpkg' - subpkg.ensure_dir() - (nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8') - (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8') - - with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): - import nspkg.subpkg - import nspkg - expected = [str(site.realpath() / 'nspkg') for site in site_dirs] - assert nspkg.__path__ == expected - assert nspkg.subpkg.__version__ == 1 +import os +import sys +import string +import platform +import itertools + +import pytest +from pkg_resources.extern import packaging # type: ignore[attr-defined] + +import pkg_resources +from pkg_resources import ( + parse_requirements, + VersionConflict, + parse_version, + Distribution, + EntryPoint, + Requirement, + safe_version, + safe_name, + WorkingSet, +) + + +# from Python 3.6 docs. +def pairwise(iterable): + "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
+ a, b = itertools.tee(iterable) + next(b, None) + return zip(a, b) + + +class Metadata(pkg_resources.EmptyProvider): + """Mock object to return metadata as if from an on-disk distribution""" + + def __init__(self, *pairs): + self.metadata = dict(pairs) + + def has_metadata(self, name): + return name in self.metadata + + def get_metadata(self, name): + return self.metadata[name] + + def get_metadata_lines(self, name): + return pkg_resources.yield_lines(self.get_metadata(name)) + + +dist_from_fn = pkg_resources.Distribution.from_filename + + +class TestDistro: + def testCollection(self): + # empty path should produce no distributions + ad = pkg_resources.Environment([], platform=None, python=None) + assert list(ad) == [] + assert ad['FooPkg'] == [] + ad.add(dist_from_fn("FooPkg-1.3_1.egg")) + ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) + ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) + + # Name is in there now + assert ad['FooPkg'] + # But only 1 package + assert list(ad) == ['foopkg'] + + # Distributions sort by version + expected = ['1.4', '1.3-1', '1.2'] + assert [dist.version for dist in ad['FooPkg']] == expected + + # Removing a distribution leaves sequence alone + ad.remove(ad['FooPkg'][1]) + assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2'] + + # And inserting adds them in order + ad.add(dist_from_fn("FooPkg-1.9.egg")) + assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2'] + + ws = WorkingSet([]) + foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") + foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") + (req,) = parse_requirements("FooPkg>=1.3") + + # Nominal case: no distros on path, should yield all applicable + assert ad.best_match(req, ws).version == '1.9' + # If a matching distro is already installed, should return only that + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + # If the first matching distro is unsuitable, it's a version conflict + ws = WorkingSet([]) + ws.add(foo12) + ws.add(foo14) + with 
pytest.raises(VersionConflict): + ad.best_match(req, ws) + + # If more than one match on the path, the first one takes precedence + ws = WorkingSet([]) + ws.add(foo14) + ws.add(foo12) + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + def checkFooPkg(self, d): + assert d.project_name == "FooPkg" + assert d.key == "foopkg" + assert d.version == "1.3.post1" + assert d.py_version == "2.4" + assert d.platform == "win32" + assert d.parsed_version == parse_version("1.3-1") + + def testDistroBasics(self): + d = Distribution( + "/some/path", + project_name="FooPkg", + version="1.3-1", + py_version="2.4", + platform="win32", + ) + self.checkFooPkg(d) + + d = Distribution("/some/path") + assert d.py_version == '{}.{}'.format(*sys.version_info) + assert d.platform is None + + def testDistroParse(self): + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg") + self.checkFooPkg(d) + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info") + self.checkFooPkg(d) + + def testDistroMetadata(self): + d = Distribution( + "/some/path", + project_name="FooPkg", + py_version="2.4", + platform="win32", + metadata=Metadata(('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")), + ) + self.checkFooPkg(d) + + def distRequires(self, txt): + return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) + + def checkRequires(self, dist, txt, extras=()): + assert list(dist.requires(extras)) == list(parse_requirements(txt)) + + def testDistroDependsSimple(self): + for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": + self.checkRequires(self.distRequires(v), v) + + needs_object_dir = pytest.mark.skipif( + not hasattr(object, '__dir__'), + reason='object.__dir__ necessary for self.__dir__ implementation', + ) + + def test_distribution_dir(self): + d = pkg_resources.Distribution() + dir(d) + + @needs_object_dir + def test_distribution_dir_includes_provider_dir(self): + d = pkg_resources.Distribution() + before = d.__dir__() + assert 'test_attr' not in before + 
d._provider.test_attr = None + after = d.__dir__() + assert len(after) == len(before) + 1 + assert 'test_attr' in after + + @needs_object_dir + def test_distribution_dir_ignores_provider_dir_leading_underscore(self): + d = pkg_resources.Distribution() + before = d.__dir__() + assert '_test_attr' not in before + d._provider._test_attr = None + after = d.__dir__() + assert len(after) == len(before) + assert '_test_attr' not in after + + def testResolve(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + # Resolving no requirements -> nothing to install + assert list(ws.resolve([], ad)) == [] + # Request something not in the collection -> DistributionNotFound + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo"), ad) + + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.egg", + metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")), + ) + ad.add(Foo) + ad.add(Distribution.from_filename("Foo-0.9.egg")) + + # Request thing(s) that are available -> list to activate + for i in range(3): + targets = list(ws.resolve(parse_requirements("Foo"), ad)) + assert targets == [Foo] + list(map(ws.add, targets)) + with pytest.raises(VersionConflict): + ws.resolve(parse_requirements("Foo==0.9"), ad) + ws = WorkingSet([]) # reset + + # Request an extra that causes an unresolved dependency for "Baz" + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo[bar]"), ad) + Baz = Distribution.from_filename( + "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) + ) + ad.add(Baz) + + # Activation list now includes resolved dependency + assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz] + # Requests for conflicting versions produce VersionConflict + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad) + + msg = 'Foo 0.9 is installed but Foo==1.2 is required' + assert vc.value.report() == msg + + def 
test_environment_marker_evaluation_negative(self): + """Environment markers are evaluated at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad) + assert list(res) == [] + + def test_environment_marker_evaluation_positive(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info") + ad.add(Foo) + res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad) + assert list(res) == [Foo] + + def test_environment_marker_evaluation_called(self): + """ + If one package foo requires bar without any extras, + markers should pass for bar without extras. + """ + (parent_req,) = parse_requirements("foo") + (req,) = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + (parent_req,) = parse_requirements("foo[]") + (req,) = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + def test_marker_evaluation_with_extras(self): + """Extras are also evaluated as markers at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", + ) + ), + ) + ad.add(Foo) + assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + res = list(ws.resolve(parse_requirements("Foo[baz]"), ad)) + assert res == [Foo, quux] + + def test_marker_evaluation_with_extras_normlized(self): + """Extras are also evaluated as markers at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + 
"/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz-lightyear\n" + "Requires-Dist: quux; extra=='baz-lightyear'", + ) + ), + ) + ad.add(Foo) + assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad)) + assert res == [Foo, quux] + + def test_marker_evaluation_with_multiple_extras(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: baz\n" + "Requires-Dist: quux; extra=='baz'\n" + "Provides-Extra: bar\n" + "Requires-Dist: fred; extra=='bar'\n", + ) + ), + ) + ad.add(Foo) + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info") + ad.add(fred) + res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad)) + assert sorted(res) == [fred, quux, Foo] + + def test_marker_evaluation_with_extras_loop(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + a = Distribution.from_filename( + "/foo_dir/a-0.2.dist-info", + metadata=Metadata(("METADATA", "Requires-Dist: c[a]")), + ) + b = Distribution.from_filename( + "/foo_dir/b-0.3.dist-info", + metadata=Metadata(("METADATA", "Requires-Dist: c[b]")), + ) + c = Distribution.from_filename( + "/foo_dir/c-1.0.dist-info", + metadata=Metadata( + ( + "METADATA", + "Provides-Extra: a\n" + "Requires-Dist: b;extra=='a'\n" + "Provides-Extra: b\n" + "Requires-Dist: foo;extra=='b'", + ) + ), + ) + foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info") + for dist in (a, b, c, foo): + ad.add(dist) + res = list(ws.resolve(parse_requirements("a"), ad)) + assert res == [a, c, b, foo] + + @pytest.mark.xfail( + sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final', + 
reason="https://github.com/python/cpython/issues/103632", + ) + def testDistroDependsOptions(self): + d = self.distRequires( + """ + Twisted>=1.5 + [docgen] + ZConfig>=2.0 + docutils>=0.3 + [fastcgi] + fcgiapp>=0.1""" + ) + self.checkRequires(d, "Twisted>=1.5") + self.checkRequires( + d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] + ) + self.checkRequires(d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]) + self.checkRequires( + d, + "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), + ["docgen", "fastcgi"], + ) + self.checkRequires( + d, + "Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), + ["fastcgi", "docgen"], + ) + with pytest.raises(pkg_resources.UnknownExtra): + d.requires(["foo"]) + + +class TestWorkingSet: + def test_find_conflicting(self): + ws = WorkingSet([]) + Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg") + ws.add(Foo) + + # create a requirement that conflicts with Foo 1.2 + req = next(parse_requirements("Foo<1.2")) + + with pytest.raises(VersionConflict) as vc: + ws.find(req) + + msg = 'Foo 1.2 is installed but Foo<1.2 is required' + assert vc.value.report() == msg + + def test_resolve_conflicts_with_prior(self): + """ + A ContextualVersionConflict should be raised when a requirement + conflicts with a prior requirement for a different package. + """ + # Create installation where Foo depends on Baz 1.0 and Bar depends on + # Baz 2.0. 
+ ws = WorkingSet([]) + md = Metadata(('depends.txt', "Baz==1.0")) + Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md) + ws.add(Foo) + md = Metadata(('depends.txt', "Baz==2.0")) + Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md) + ws.add(Bar) + Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg") + ws.add(Baz) + Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg") + ws.add(Baz) + + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo\nBar\n")) + + msg = "Baz 1.0 is installed but Baz==2.0 is required by " + msg += repr(set(['Bar'])) + assert vc.value.report() == msg + + +class TestEntryPoints: + def assertfields(self, ep): + assert ep.name == "foo" + assert ep.module_name == "pkg_resources.tests.test_resources" + assert ep.attrs == ("TestEntryPoints",) + assert ep.extras == ("x",) + assert ep.load() is TestEntryPoints + expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + assert str(ep) == expect + + def setup_method(self, method): + self.dist = Distribution.from_filename( + "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]')) + ) + + def testBasics(self): + ep = EntryPoint( + "foo", + "pkg_resources.tests.test_resources", + ["TestEntryPoints"], + ["x"], + self.dist, + ) + self.assertfields(ep) + + def testParse(self): + s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + ep = EntryPoint.parse(s, self.dist) + self.assertfields(ep) + + ep = EntryPoint.parse("bar baz= spammity[PING]") + assert ep.name == "bar baz" + assert ep.module_name == "spammity" + assert ep.attrs == () + assert ep.extras == ("ping",) + + ep = EntryPoint.parse(" fizzly = wocka:foo") + assert ep.name == "fizzly" + assert ep.module_name == "wocka" + assert ep.attrs == ("foo",) + assert ep.extras == () + + # plus in the name + spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer" + ep = EntryPoint.parse(spec) + assert ep.name == 'html+mako' + + reject_specs = "foo", 
"x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2" + + @pytest.mark.parametrize("reject_spec", reject_specs) + def test_reject_spec(self, reject_spec): + with pytest.raises(ValueError): + EntryPoint.parse(reject_spec) + + def test_printable_name(self): + """ + Allow any printable character in the name. + """ + # Create a name with all printable characters; strip the whitespace. + name = string.printable.strip() + spec = "{name} = module:attr".format(**locals()) + ep = EntryPoint.parse(spec) + assert ep.name == name + + def checkSubMap(self, m): + assert len(m) == len(self.submap_expect) + for key, ep in self.submap_expect.items(): + assert m.get(key).name == ep.name + assert m.get(key).module_name == ep.module_name + assert sorted(m.get(key).attrs) == sorted(ep.attrs) + assert sorted(m.get(key).extras) == sorted(ep.extras) + + submap_expect = dict( + feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), + feature2=EntryPoint( + 'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2'] + ), + feature3=EntryPoint('feature3', 'this.module', extras=['something']), + ) + submap_str = """ + # define features for blah blah + feature1 = somemodule:somefunction + feature2 = another.module:SomeClass [extra1,extra2] + feature3 = this.module [something] + """ + + def testParseList(self): + self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) + with pytest.raises(ValueError): + EntryPoint.parse_group("x a", "foo=bar") + with pytest.raises(ValueError): + EntryPoint.parse_group("x", ["foo=baz", "foo=bar"]) + + def testParseMap(self): + m = EntryPoint.parse_map({'xyz': self.submap_str}) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + m = EntryPoint.parse_map("[xyz]\n" + self.submap_str) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + with pytest.raises(ValueError): + EntryPoint.parse_map(["[xyz]", "[xyz]"]) + with pytest.raises(ValueError): + EntryPoint.parse_map(self.submap_str) + + def 
testDeprecationWarnings(self): + ep = EntryPoint( + "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], ["x"] + ) + with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning): + ep.load(require=False) + + +class TestRequirements: + def testBasics(self): + r = Requirement.parse("Twisted>=1.2") + assert str(r) == "Twisted>=1.2" + assert repr(r) == "Requirement.parse('Twisted>=1.2')" + assert r == Requirement("Twisted>=1.2") + assert r == Requirement("twisTed>=1.2") + assert r != Requirement("Twisted>=2.0") + assert r != Requirement("Zope>=1.2") + assert r != Requirement("Zope>=3.0") + assert r != Requirement("Twisted[extras]>=1.2") + + def testOrdering(self): + r1 = Requirement("Twisted==1.2c1,>=1.2") + r2 = Requirement("Twisted>=1.2,==1.2c1") + assert r1 == r2 + assert str(r1) == str(r2) + assert str(r2) == "Twisted==1.2c1,>=1.2" + assert Requirement("Twisted") != Requirement( + "Twisted @ https://localhost/twisted.zip" + ) + + def testBasicContains(self): + r = Requirement("Twisted>=1.2") + foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") + twist11 = Distribution.from_filename("Twisted-1.1.egg") + twist12 = Distribution.from_filename("Twisted-1.2.egg") + assert parse_version('1.2') in r + assert parse_version('1.1') not in r + assert '1.2' in r + assert '1.1' not in r + assert foo_dist not in r + assert twist11 not in r + assert twist12 in r + + def testOptionsAndHashing(self): + r1 = Requirement.parse("Twisted[foo,bar]>=1.2") + r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") + assert r1 == r2 + assert set(r1.extras) == set(("foo", "bar")) + assert set(r2.extras) == set(("foo", "bar")) + assert hash(r1) == hash(r2) + assert hash(r1) == hash( + ( + "twisted", + None, + packaging.specifiers.SpecifierSet(">=1.2"), + frozenset(["foo", "bar"]), + None, + ) + ) + assert hash( + Requirement.parse("Twisted @ https://localhost/twisted.zip") + ) == hash( + ( + "twisted", + "https://localhost/twisted.zip", + packaging.specifiers.SpecifierSet(), + 
frozenset(), + None, + ) + ) + + def testVersionEquality(self): + r1 = Requirement.parse("foo==0.3a2") + r2 = Requirement.parse("foo!=0.3a4") + d = Distribution.from_filename + + assert d("foo-0.3a4.egg") not in r1 + assert d("foo-0.3a1.egg") not in r1 + assert d("foo-0.3a4.egg") not in r2 + + assert d("foo-0.3a2.egg") in r1 + assert d("foo-0.3a2.egg") in r2 + assert d("foo-0.3a3.egg") in r2 + assert d("foo-0.3a5.egg") in r2 + + def testSetuptoolsProjectName(self): + """ + The setuptools project should implement the setuptools package. + """ + + assert Requirement.parse('setuptools').project_name == 'setuptools' + # setuptools 0.7 and higher means setuptools. + assert Requirement.parse('setuptools == 0.7').project_name == 'setuptools' + assert Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools' + assert Requirement.parse('setuptools >= 0.7').project_name == 'setuptools' + + +class TestParsing: + def testEmptyParse(self): + assert list(parse_requirements('')) == [] + + def testYielding(self): + for inp, out in [ + ([], []), + ('x', ['x']), + ([[]], []), + (' x\n y', ['x', 'y']), + (['x\n\n', 'y'], ['x', 'y']), + ]: + assert list(pkg_resources.yield_lines(inp)) == out + + def testSplitting(self): + sample = """ + x + [Y] + z + + a + [b ] + # foo + c + [ d] + [q] + v + """ + assert list(pkg_resources.split_sections(sample)) == [ + (None, ["x"]), + ("Y", ["z", "a"]), + ("b", ["c"]), + ("d", []), + ("q", ["v"]), + ] + with pytest.raises(ValueError): + list(pkg_resources.split_sections("[foo")) + + def testSafeName(self): + assert safe_name("adns-python") == "adns-python" + assert safe_name("WSGI Utils") == "WSGI-Utils" + assert safe_name("WSGI Utils") == "WSGI-Utils" + assert safe_name("Money$$$Maker") == "Money-Maker" + assert safe_name("peak.web") != "peak-web" + + def testSafeVersion(self): + assert safe_version("1.2-1") == "1.2.post1" + assert safe_version("1.2 alpha") == "1.2.alpha" + assert safe_version("2.3.4 20050521") == "2.3.4.20050521" + 
assert safe_version("Money$$$Maker") == "Money-Maker" + assert safe_version("peak.web") == "peak.web" + + def testSimpleRequirements(self): + assert list(parse_requirements('Twis-Ted>=1.2-1')) == [ + Requirement('Twis-Ted>=1.2-1') + ] + assert list(parse_requirements('Twisted >=1.2, \\ # more\n<2.0')) == [ + Requirement('Twisted>=1.2,<2.0') + ] + assert Requirement.parse("FooBar==1.99a3") == Requirement("FooBar==1.99a3") + with pytest.raises(ValueError): + Requirement.parse(">=2.3") + with pytest.raises(ValueError): + Requirement.parse("x\\") + with pytest.raises(ValueError): + Requirement.parse("x==2 q") + with pytest.raises(ValueError): + Requirement.parse("X==1\nY==2") + with pytest.raises(ValueError): + Requirement.parse("#") + + def test_requirements_with_markers(self): + assert Requirement.parse("foobar;os_name=='a'") == Requirement.parse( + "foobar;os_name=='a'" + ) + assert Requirement.parse( + "name==1.1;python_version=='2.7'" + ) != Requirement.parse("name==1.1;python_version=='3.6'") + assert Requirement.parse( + "name==1.0;python_version=='2.7'" + ) != Requirement.parse("name==1.2;python_version=='2.7'") + assert Requirement.parse( + "name[foo]==1.0;python_version=='3.6'" + ) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'") + + def test_local_version(self): + (req,) = parse_requirements('foo==1.0+org1') + + def test_spaces_between_multiple_versions(self): + (req,) = parse_requirements('foo>=1.0, <3') + (req,) = parse_requirements('foo >= 1.0, < 3') + + @pytest.mark.parametrize( + ['lower', 'upper'], + [ + ('1.2-rc1', '1.2rc1'), + ('0.4', '0.4.0'), + ('0.4.0.0', '0.4.0'), + ('0.4.0-0', '0.4-0'), + ('0post1', '0.0post1'), + ('0pre1', '0.0c1'), + ('0.0.0preview1', '0c1'), + ('0.0c1', '0-rc1'), + ('1.2a1', '1.2.a.1'), + ('1.2.a', '1.2a'), + ], + ) + def testVersionEquality(self, lower, upper): + assert parse_version(lower) == parse_version(upper) + + torture = """ + 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1 + 0.79.9999+0.80.0pre2-3 
0.79.9999+0.80.0pre2-2 + 0.77.2-1 0.77.1-1 0.77.0-1 + """ + + @pytest.mark.parametrize( + ['lower', 'upper'], + [ + ('2.1', '2.1.1'), + ('2a1', '2b0'), + ('2a1', '2.1'), + ('2.3a1', '2.3'), + ('2.1-1', '2.1-2'), + ('2.1-1', '2.1.1'), + ('2.1', '2.1post4'), + ('2.1a0-20040501', '2.1'), + ('1.1', '02.1'), + ('3.2', '3.2.post0'), + ('3.2post1', '3.2post2'), + ('0.4', '4.0'), + ('0.0.4', '0.4.0'), + ('0post1', '0.4post1'), + ('2.1.0-rc1', '2.1.0'), + ('2.1dev', '2.1a0'), + ] + + list(pairwise(reversed(torture.split()))), + ) + def testVersionOrdering(self, lower, upper): + assert parse_version(lower) < parse_version(upper) + + def testVersionHashable(self): + """ + Ensure that our versions stay hashable even though we've subclassed + them and added some shim code to them. + """ + assert hash(parse_version("1.0")) == hash(parse_version("1.0")) + + +class TestNamespaces: + ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" + + @pytest.fixture + def symlinked_tmpdir(self, tmpdir): + """ + Where available, return the tempdir as a symlink, + which as revealed in #231 is more fragile than + a natural tempdir. + """ + if not hasattr(os, 'symlink'): + yield str(tmpdir) + return + + link_name = str(tmpdir) + '-linked' + os.symlink(str(tmpdir), link_name) + try: + yield type(tmpdir)(link_name) + finally: + os.unlink(link_name) + + @pytest.fixture(autouse=True) + def patched_path(self, tmpdir): + """ + Patch sys.path to include the 'site-pkgs' dir. Also + restore pkg_resources._namespace_packages to its + former state. 
+ """ + saved_ns_pkgs = pkg_resources._namespace_packages.copy() + saved_sys_path = sys.path[:] + site_pkgs = tmpdir.mkdir('site-pkgs') + sys.path.append(str(site_pkgs)) + try: + yield + finally: + pkg_resources._namespace_packages = saved_ns_pkgs + sys.path = saved_sys_path + + issue591 = pytest.mark.xfail(platform.system() == 'Windows', reason="#591") + + @issue591 + def test_two_levels_deep(self, symlinked_tmpdir): + """ + Test nested namespace packages + Create namespace packages in the following tree : + site-packages-1/pkg1/pkg2 + site-packages-2/pkg1/pkg2 + Check both are in the _namespace_packages dict and that their __path__ + is correct + """ + real_tmpdir = symlinked_tmpdir.realpath() + tmpdir = symlinked_tmpdir + sys.path.append(str(tmpdir / 'site-pkgs2')) + site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2' + for site in site_dirs: + pkg1 = site / 'pkg1' + pkg2 = pkg1 / 'pkg2' + pkg2.ensure_dir() + (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8') + (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8') + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1 + assert "pkg1" in pkg_resources._namespace_packages + # attempt to import pkg2 from site-pkgs2 + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import pkg1.pkg2 + # check the _namespace_packages dict + assert "pkg1.pkg2" in pkg_resources._namespace_packages + assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] + # check the __path__ attribute contains both paths + expected = [ + str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"), + str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"), + ] + assert pkg1.pkg2.__path__ == expected + + @issue591 + def test_path_order(self, symlinked_tmpdir): + """ + Test that if multiple versions of the same namespace package subpackage + are on different sys.path entries, that only the one earliest on + sys.path is imported, and that the namespace 
package's __path__ is in + the correct order. + + Regression test for https://github.com/pypa/setuptools/issues/207 + """ + + tmpdir = symlinked_tmpdir + site_dirs = ( + tmpdir / "site-pkgs", + tmpdir / "site-pkgs2", + tmpdir / "site-pkgs3", + ) + + vers_str = "__version__ = %r" + + for number, site in enumerate(site_dirs, 1): + if number > 1: + sys.path.append(str(site)) + nspkg = site / 'nspkg' + subpkg = nspkg / 'subpkg' + subpkg.ensure_dir() + (nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8') + (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8') + + with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"): + import nspkg.subpkg + import nspkg + expected = [str(site.realpath() / 'nspkg') for site in site_dirs] + assert nspkg.__path__ == expected + assert nspkg.subpkg.__version__ == 1 diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py index fadfd00e58..c8344b7cf0 100644 --- a/setuptools/_importlib.py +++ b/setuptools/_importlib.py @@ -1,51 +1,51 @@ -import sys - - -def disable_importlib_metadata_finder(metadata): - """ - Ensure importlib_metadata doesn't provide older, incompatible - Distributions. - - Workaround for #3102. - """ - try: - import importlib_metadata - except ImportError: - return - except AttributeError: - from .warnings import SetuptoolsWarning - - SetuptoolsWarning.emit( - "Incompatibility problem.", - """ - `importlib-metadata` version is incompatible with `setuptools`. - This problem is likely to be solved by installing an updated version of - `importlib-metadata`. - """, - see_url="https://github.com/python/importlib_metadata/issues/396", - ) # Ensure a descriptive message is shown. 
- raise # This exception can be suppressed by _distutils_hack - - if importlib_metadata is metadata: - return - to_remove = [ - ob - for ob in sys.meta_path - if isinstance(ob, importlib_metadata.MetadataPathFinder) - ] - for item in to_remove: - sys.meta_path.remove(item) - - -if sys.version_info < (3, 10): - from setuptools.extern import importlib_metadata as metadata # type: ignore[attr-defined] - - disable_importlib_metadata_finder(metadata) -else: - import importlib.metadata as metadata # noqa: F401 - - -if sys.version_info < (3, 9): - from setuptools.extern import importlib_resources as resources # type: ignore[attr-defined] -else: - import importlib.resources as resources # noqa: F401 +import sys + + +def disable_importlib_metadata_finder(metadata): + """ + Ensure importlib_metadata doesn't provide older, incompatible + Distributions. + + Workaround for #3102. + """ + try: + import importlib_metadata + except ImportError: + return + except AttributeError: + from .warnings import SetuptoolsWarning + + SetuptoolsWarning.emit( + "Incompatibility problem.", + """ + `importlib-metadata` version is incompatible with `setuptools`. + This problem is likely to be solved by installing an updated version of + `importlib-metadata`. + """, + see_url="https://github.com/python/importlib_metadata/issues/396", + ) # Ensure a descriptive message is shown. 
+ raise # This exception can be suppressed by _distutils_hack + + if importlib_metadata is metadata: + return + to_remove = [ + ob + for ob in sys.meta_path + if isinstance(ob, importlib_metadata.MetadataPathFinder) + ] + for item in to_remove: + sys.meta_path.remove(item) + + +if sys.version_info < (3, 10): + from setuptools.extern import importlib_metadata as metadata # type: ignore[attr-defined] + + disable_importlib_metadata_finder(metadata) +else: + import importlib.metadata as metadata # noqa: F401 + + +if sys.version_info < (3, 9): + from setuptools.extern import importlib_resources as resources # type: ignore[attr-defined] +else: + import importlib.resources as resources # noqa: F401 diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index 9c7455a47d..1f4d1e23a2 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -1,125 +1,125 @@ -""" -Helpers for normalization as expected in wheel/sdist/module file names -and core metadata -""" -import re -from pathlib import Path -from typing import Union - -from .extern import packaging # type: ignore[attr-defined] -from .warnings import SetuptoolsDeprecationWarning - -_Path = Union[str, Path] - -# https://packaging.python.org/en/latest/specifications/core-metadata/#name -_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) -_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) -_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) - - -def safe_identifier(name: str) -> str: - """Make a string safe to be used as Python identifier. - >>> safe_identifier("12abc") - '_12abc' - >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") - '__editable___myns_pkg_78_9_3_local' - """ - safe = re.sub(r'\W|^(?=\d)', '_', name) - assert safe.isidentifier() - return safe - - -def safe_name(component: str) -> str: - """Escape a component used as a project name according to Core Metadata. 
- >>> safe_name("hello world") - 'hello-world' - >>> safe_name("hello?world") - 'hello-world' - """ - # See pkg_resources.safe_name - return _UNSAFE_NAME_CHARS.sub("-", component) - - -def safe_version(version: str) -> str: - """Convert an arbitrary string into a valid version string. - >>> safe_version("1988 12 25") - '1988.12.25' - >>> safe_version("v0.2.1") - '0.2.1' - >>> safe_version("v0.2?beta") - '0.2b0' - >>> safe_version("v0.2 beta") - '0.2b0' - >>> safe_version("ubuntu lts") - Traceback (most recent call last): - ... - setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' - """ - v = version.replace(' ', '.') - try: - return str(packaging.version.Version(v)) - except packaging.version.InvalidVersion: - attempt = _UNSAFE_NAME_CHARS.sub("-", v) - return str(packaging.version.Version(attempt)) - - -def best_effort_version(version: str) -> str: - """Convert an arbitrary string into a version-like string. - >>> best_effort_version("v0.2 beta") - '0.2b0' - - >>> import warnings - >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) - >>> best_effort_version("ubuntu lts") - 'ubuntu.lts' - """ - # See pkg_resources.safe_version - try: - return safe_version(version) - except packaging.version.InvalidVersion: - SetuptoolsDeprecationWarning.emit( - f"Invalid version: {version!r}.", - f""" - Version {version!r} is not valid according to PEP 440. - - Please make sure to specify a valid version for your package. - Also note that future releases of setuptools may halt the build process - if an invalid version is given. 
- """, - see_url="https://peps.python.org/pep-0440/", - due_date=(2023, 9, 26), # See setuptools/dist _validate_version - ) - v = version.replace(' ', '.') - return safe_name(v) - - -def safe_extra(extra: str) -> str: - """Normalize extra name according to PEP 685 - >>> safe_extra("_FrIeNdLy-._.-bArD") - 'friendly-bard' - >>> safe_extra("FrIeNdLy-._.-bArD__._-") - 'friendly-bard' - """ - return _NON_ALPHANUMERIC.sub("-", extra).strip("-").lower() - - -def filename_component(value: str) -> str: - """Normalize each component of a filename (e.g. distribution/version part of wheel) - Note: ``value`` needs to be already normalized. - >>> filename_component("my-pkg") - 'my_pkg' - """ - return value.replace("-", "_").strip("_") - - -def safer_name(value: str) -> str: - """Like ``safe_name`` but can be used as filename component for wheel""" - # See bdist_wheel.safer_name - return filename_component(safe_name(value)) - - -def safer_best_effort_version(value: str) -> str: - """Like ``best_effort_version`` but can be used as filename component for wheel""" - # See bdist_wheel.safer_verion - # TODO: Replace with only safe_version in the future (no need for best effort) - return filename_component(best_effort_version(value)) +""" +Helpers for normalization as expected in wheel/sdist/module file names +and core metadata +""" +import re +from pathlib import Path +from typing import Union + +from .extern import packaging # type: ignore[attr-defined] +from .warnings import SetuptoolsDeprecationWarning + +_Path = Union[str, Path] + +# https://packaging.python.org/en/latest/specifications/core-metadata/#name +_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) +_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) +_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) + + +def safe_identifier(name: str) -> str: + """Make a string safe to be used as Python identifier. 
+ >>> safe_identifier("12abc") + '_12abc' + >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") + '__editable___myns_pkg_78_9_3_local' + """ + safe = re.sub(r'\W|^(?=\d)', '_', name) + assert safe.isidentifier() + return safe + + +def safe_name(component: str) -> str: + """Escape a component used as a project name according to Core Metadata. + >>> safe_name("hello world") + 'hello-world' + >>> safe_name("hello?world") + 'hello-world' + """ + # See pkg_resources.safe_name + return _UNSAFE_NAME_CHARS.sub("-", component) + + +def safe_version(version: str) -> str: + """Convert an arbitrary string into a valid version string. + >>> safe_version("1988 12 25") + '1988.12.25' + >>> safe_version("v0.2.1") + '0.2.1' + >>> safe_version("v0.2?beta") + '0.2b0' + >>> safe_version("v0.2 beta") + '0.2b0' + >>> safe_version("ubuntu lts") + Traceback (most recent call last): + ... + setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' + """ + v = version.replace(' ', '.') + try: + return str(packaging.version.Version(v)) + except packaging.version.InvalidVersion: + attempt = _UNSAFE_NAME_CHARS.sub("-", v) + return str(packaging.version.Version(attempt)) + + +def best_effort_version(version: str) -> str: + """Convert an arbitrary string into a version-like string. + >>> best_effort_version("v0.2 beta") + '0.2b0' + + >>> import warnings + >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) + >>> best_effort_version("ubuntu lts") + 'ubuntu.lts' + """ + # See pkg_resources.safe_version + try: + return safe_version(version) + except packaging.version.InvalidVersion: + SetuptoolsDeprecationWarning.emit( + f"Invalid version: {version!r}.", + f""" + Version {version!r} is not valid according to PEP 440. + + Please make sure to specify a valid version for your package. + Also note that future releases of setuptools may halt the build process + if an invalid version is given. 
+ """, + see_url="https://peps.python.org/pep-0440/", + due_date=(2023, 9, 26), # See setuptools/dist _validate_version + ) + v = version.replace(' ', '.') + return safe_name(v) + + +def safe_extra(extra: str) -> str: + """Normalize extra name according to PEP 685 + >>> safe_extra("_FrIeNdLy-._.-bArD") + 'friendly-bard' + >>> safe_extra("FrIeNdLy-._.-bArD__._-") + 'friendly-bard' + """ + return _NON_ALPHANUMERIC.sub("-", extra).strip("-").lower() + + +def filename_component(value: str) -> str: + """Normalize each component of a filename (e.g. distribution/version part of wheel) + Note: ``value`` needs to be already normalized. + >>> filename_component("my-pkg") + 'my_pkg' + """ + return value.replace("-", "_").strip("_") + + +def safer_name(value: str) -> str: + """Like ``safe_name`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_name + return filename_component(safe_name(value)) + + +def safer_best_effort_version(value: str) -> str: + """Like ``best_effort_version`` but can be used as filename component for wheel""" + # See bdist_wheel.safer_verion + # TODO: Replace with only safe_version in the future (no need for best effort) + return filename_component(best_effort_version(value)) diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 31463dc2b8..3c3140df26 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -1,735 +1,735 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -from distutils.filelist import FileList as _FileList -from distutils.errors import DistutilsInternalError -from distutils.util import convert_path -from distutils import log -import distutils.errors -import distutils.filelist -import functools -import os -import re -import sys -import time -import collections - -from .._importlib import metadata -from .. import _entry_points, _normalization -from . 
import _requirestxt - -from setuptools import Command -from setuptools.command.sdist import sdist -from setuptools.command.sdist import walk_revctrl -from setuptools.command.setopt import edit_config -from setuptools.command import bdist_egg -import setuptools.unicode_utils as unicode_utils -from setuptools.glob import glob - -from setuptools.extern import packaging # type: ignore[attr-defined] -from ..warnings import SetuptoolsDeprecationWarning - - -PY_MAJOR = '{}.{}'.format(*sys.version_info) - - -def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME - """ - Translate a file path glob like '*.txt' in to a regular expression. - This differs from fnmatch.translate which allows wildcards to match - directory separators. It also knows about '**/' which matches any number of - directories. - """ - pat = '' - - # This will split on '/' within [character classes]. This is deliberate. - chunks = glob.split(os.path.sep) - - sep = re.escape(os.sep) - valid_char = '[^%s]' % (sep,) - - for c, chunk in enumerate(chunks): - last_chunk = c == len(chunks) - 1 - - # Chunks that are a literal ** are globstars. They match anything. 
- if chunk == '**': - if last_chunk: - # Match anything if this is the last component - pat += '.*' - else: - # Match '(name/)*' - pat += '(?:%s+%s)*' % (valid_char, sep) - continue # Break here as the whole path component has been handled - - # Find any special characters in the remainder - i = 0 - chunk_len = len(chunk) - while i < chunk_len: - char = chunk[i] - if char == '*': - # Match any number of name characters - pat += valid_char + '*' - elif char == '?': - # Match a name character - pat += valid_char - elif char == '[': - # Character class - inner_i = i + 1 - # Skip initial !/] chars - if inner_i < chunk_len and chunk[inner_i] == '!': - inner_i = inner_i + 1 - if inner_i < chunk_len and chunk[inner_i] == ']': - inner_i = inner_i + 1 - - # Loop till the closing ] is found - while inner_i < chunk_len and chunk[inner_i] != ']': - inner_i = inner_i + 1 - - if inner_i >= chunk_len: - # Got to the end of the string without finding a closing ] - # Do not treat this as a matching group, but as a literal [ - pat += re.escape(char) - else: - # Grab the insides of the [brackets] - inner = chunk[i + 1 : inner_i] - char_class = '' - - # Class negation - if inner[0] == '!': - char_class = '^' - inner = inner[1:] - - char_class += re.escape(inner) - pat += '[%s]' % (char_class,) - - # Skip to the end ] - i = inner_i - else: - pat += re.escape(char) - i += 1 - - # Join each chunk with the dir separator - if not last_chunk: - pat += sep - - pat += r'\Z' - return re.compile(pat, flags=re.MULTILINE | re.DOTALL) - - -class InfoCommon: - tag_build = None - tag_date = None - - @property - def name(self): - return _normalization.safe_name(self.distribution.get_name()) - - def tagged_version(self): - tagged = self._maybe_tag(self.distribution.get_version()) - return _normalization.best_effort_version(tagged) - - def _maybe_tag(self, version): - """ - egg_info may be called more than once for a distribution, - in which case the version string already contains all tags. 
- """ - return ( - version - if self.vtags and self._already_tagged(version) - else version + self.vtags - ) - - def _already_tagged(self, version: str) -> bool: - # Depending on their format, tags may change with version normalization. - # So in addition the regular tags, we have to search for the normalized ones. - return version.endswith(self.vtags) or version.endswith(self._safe_tags()) - - def _safe_tags(self) -> str: - # To implement this we can rely on `safe_version` pretending to be version 0 - # followed by tags. Then we simply discard the starting 0 (fake version number) - return _normalization.best_effort_version(f"0{self.vtags}")[1:] - - def tags(self) -> str: - version = '' - if self.tag_build: - version += self.tag_build - if self.tag_date: - version += time.strftime("%Y%m%d") - return version - - vtags = property(tags) - - -class egg_info(InfoCommon, Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ( - 'egg-base=', - 'e', - "directory containing .egg-info directories" - " (default: top of the source tree)", - ), - ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date'] - negative_opt = { - 'no-date': 'tag-date', - } - - def initialize_options(self): - self.egg_base = None - self.egg_name = None - self.egg_info = None - self.egg_version = None - self.ignore_egg_info_in_manifest = False - - #################################### - # allow the 'tag_svn_revision' to be detected and - # set, supporting sdists built on older Setuptools. - @property - def tag_svn_revision(self): - pass - - @tag_svn_revision.setter - def tag_svn_revision(self, value): - pass - - #################################### - - def save_version_info(self, filename): - """ - Materialize the value of date into the - build tag. 
Install build keys in a deterministic order - to avoid arbitrary reordering on subsequent builds. - """ - egg_info = collections.OrderedDict() - # follow the order these keys would have been added - # when PYTHONHASHSEED=0 - egg_info['tag_build'] = self.tags() - egg_info['tag_date'] = 0 - edit_config(filename, dict(egg_info=egg_info)) - - def finalize_options(self): - # Note: we need to capture the current value returned - # by `self.tagged_version()`, so we can later update - # `self.distribution.metadata.version` without - # repercussions. - self.egg_name = self.name - self.egg_version = self.tagged_version() - parsed_version = packaging.version.Version(self.egg_version) - - try: - is_version = isinstance(parsed_version, packaging.version.Version) - spec = "%s==%s" if is_version else "%s===%s" - packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version)) - except ValueError as e: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" - % (self.egg_name, self.egg_version) - ) from e - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('', os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) 
- # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - key = getattr(pd, "key", None) or getattr(pd, "name", None) - if pd is not None and key == self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = packaging.version.Version(self.egg_version) - self.distribution._patched_dist = None - - def _get_egg_basename(self, py_version=PY_MAJOR, platform=None): - """Compute filename of the output egg. Private API.""" - return _egg_basename(self.egg_name, self.egg_version, py_version, platform) - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn("%s not set in setup(), but %s exists", what, filename) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def run(self): - self.mkpath(self.egg_info) - try: - os.utime(self.egg_info, None) - except OSError as e: - msg = f"Cannot update time stamp of directory '{self.egg_info}'" - raise distutils.errors.DistutilsFileError(msg) from e - for ep in metadata.entry_points(group='egg_info.writers'): - writer = ep.load() - writer(self, ep.name, os.path.join(self.egg_info, ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - -class FileList(_FileList): - # Implementations of the various MANIFEST.in commands - - def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False): - super().__init__(warn, debug_print) - self.ignore_egg_info_dir = ignore_egg_info_dir - - def process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). 
- (action, patterns, dir, dir_pattern) = self._parse_template_line(line) - - action_map = { - 'include': self.include, - 'exclude': self.exclude, - 'global-include': self.global_include, - 'global-exclude': self.global_exclude, - 'recursive-include': functools.partial( - self.recursive_include, - dir, - ), - 'recursive-exclude': functools.partial( - self.recursive_exclude, - dir, - ), - 'graft': self.graft, - 'prune': self.prune, - } - log_map = { - 'include': "warning: no files found matching '%s'", - 'exclude': ("warning: no previously-included files found " "matching '%s'"), - 'global-include': ( - "warning: no files found matching '%s' " "anywhere in distribution" - ), - 'global-exclude': ( - "warning: no previously-included files matching " - "'%s' found anywhere in distribution" - ), - 'recursive-include': ( - "warning: no files found matching '%s' " "under directory '%s'" - ), - 'recursive-exclude': ( - "warning: no previously-included files matching " - "'%s' found under directory '%s'" - ), - 'graft': "warning: no directories found matching '%s'", - 'prune': "no previously-included directories found matching '%s'", - } - - try: - process_action = action_map[action] - except KeyError: - raise DistutilsInternalError( - "this cannot happen: invalid action '{action!s}'".format(action=action), - ) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. 
- - action_is_recursive = action.startswith('recursive-') - if action in {'graft', 'prune'}: - patterns = [dir_pattern] - extra_log_args = (dir,) if action_is_recursive else () - log_tmpl = log_map[action] - - self.debug_print( - ' '.join( - [action] + ([dir] if action_is_recursive else []) + patterns, - ) - ) - for pattern in patterns: - if not process_action(pattern): - log.warn(log_tmpl, pattern, *extra_log_args) - - def _remove_files(self, predicate): - """ - Remove all files from the file list that match the predicate. - Return True if any matching files were removed - """ - found = False - for i in range(len(self.files) - 1, -1, -1): - if predicate(self.files[i]): - self.debug_print(" removing " + self.files[i]) - del self.files[i] - found = True - return found - - def include(self, pattern): - """Include files that match 'pattern'.""" - found = [f for f in glob(pattern) if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def exclude(self, pattern): - """Exclude files that match 'pattern'.""" - match = translate_pattern(pattern) - return self._remove_files(match.match) - - def recursive_include(self, dir, pattern): - """ - Include all files anywhere in 'dir/' that match the pattern. - """ - full_pattern = os.path.join(dir, '**', pattern) - found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)] - self.extend(found) - return bool(found) - - def recursive_exclude(self, dir, pattern): - """ - Exclude any file anywhere in 'dir/' that match the pattern. 
- """ - match = translate_pattern(os.path.join(dir, '**', pattern)) - return self._remove_files(match.match) - - def graft(self, dir): - """Include all files from 'dir/'.""" - found = [ - item - for match_dir in glob(dir) - for item in distutils.filelist.findall(match_dir) - ] - self.extend(found) - return bool(found) - - def prune(self, dir): - """Filter out files from 'dir/'.""" - match = translate_pattern(os.path.join(dir, '**')) - return self._remove_files(match.match) - - def global_include(self, pattern): - """ - Include all files anywhere in the current directory that match the - pattern. This is very inefficient on large file trees. - """ - if self.allfiles is None: - self.findall() - match = translate_pattern(os.path.join('**', pattern)) - found = [f for f in self.allfiles if match.match(f)] - self.extend(found) - return bool(found) - - def global_exclude(self, pattern): - """ - Exclude all files anywhere that match the pattern. - """ - match = translate_pattern(os.path.join('**', pattern)) - return self._remove_files(match.match) - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if self._safe_path(path): - self.files.append(path) - - def extend(self, paths): - self.files.extend(filter(self._safe_path, paths)) - - def _repair(self): - """ - Replace self.files with only safe paths - - Because some owners of FileList manipulate the underlying - ``files`` attribute directly, this method must be called to - repair those paths. 
- """ - self.files = list(filter(self._safe_path, self.files)) - - def _safe_path(self, path): - enc_warn = "'%s' not %s encodable -- skipping" - - # To avoid accidental trans-codings errors, first to unicode - u_path = unicode_utils.filesys_decode(path) - if u_path is None: - log.warn("'%s' in unexpected encoding -- skipping" % path) - return False - - # Must ensure utf-8 encodability - utf8_path = unicode_utils.try_encode(u_path, "utf-8") - if utf8_path is None: - log.warn(enc_warn, path, 'utf-8') - return False - - try: - # ignore egg-info paths - is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path - if self.ignore_egg_info_dir and is_egg_info: - return False - # accept is either way checks out - if os.path.exists(u_path) or os.path.exists(utf8_path): - return True - # this will catch any encode errors decoding u_path - except UnicodeEncodeError: - log.warn(enc_warn, path, sys.getfilesystemencoding()) - - -class manifest_maker(sdist): - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - self.ignore_egg_info_dir = False - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir) - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.add_license_files() - self._add_referenced_files() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def _manifest_normalize(self, path): - path = unicode_utils.filesys_decode(path) - return path.replace(os.sep, '/') - - def write_manifest(self): - """ - Write the file list in 'self.filelist' to the manifest file - named by 'self.manifest'. 
- """ - self.filelist._repair() - - # Now _repairs should encodability, but not unicode - files = [self._manifest_normalize(f) for f in self.filelist.files] - msg = "writing manifest file '%s'" % self.manifest - self.execute(write_file, (self.manifest, files), msg) - - def warn(self, msg): - if not self._should_suppress_warning(msg): - sdist.warn(self, msg) - - @staticmethod - def _should_suppress_warning(msg): - """ - suppress missing-file warnings from sdist - """ - return re.match(r"standard file .*not found", msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - - if os.path.exists("setup.py"): - # setup.py should be included by default, even if it's not - # the script called to create the sdist - self.filelist.append("setup.py") - - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.graft(ei_cmd.egg_info) - - def add_license_files(self): - license_files = self.distribution.metadata.license_files or [] - for lf in license_files: - log.info("adding license file '%s'", lf) - self.filelist.extend(license_files) - - def _add_referenced_files(self): - """Add files referenced by the config (e.g. 
`file:` directive) to filelist""" - referenced = getattr(self.distribution, '_referenced_files', []) - # ^-- fallback if dist comes from distutils or is a custom class - for rf in referenced: - log.debug("adding file referenced by config '%s'", rf) - self.filelist.extend(referenced) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.prune(build.build_base) - self.filelist.prune(base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern( - r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1 - ) - - def _safe_data_files(self, build_py): - """ - The parent class implementation of this method - (``sdist``) will try to include data files, which - might cause recursion problems when - ``include_package_data=True``. - - Therefore, avoid triggering any attempt of - analyzing/building the manifest again. - """ - if hasattr(build_py, 'get_data_files_without_manifest'): - return build_py.get_data_files_without_manifest() - - SetuptoolsDeprecationWarning.emit( - "`build_py` command does not inherit from setuptools' `build_py`.", - """ - Custom 'build_py' does not implement 'get_data_files_without_manifest'. - Please extend command classes from setuptools instead of distutils. - """, - see_url="https://peps.python.org/pep-0632/", - # due_date not defined yet, old projects might still do it? - ) - return build_py.get_data_files() - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. 
- """ - contents = "\n".join(contents) - - # assuming the contents has been vetted for utf-8 encoding - contents = contents.encode("utf-8") - - with open(filename, "wb") as f: # always write POSIX-style manifest - f.write(contents) - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution, 'zip_safe', None) - - bdist_egg.write_safety_flag(cmd.egg_info, safe) - - -def warn_depends_obsolete(cmd, basename, filename): - """ - Unused: left to avoid errors when updating (from source) from <= 67.8. - Old installations have a .dist-info directory with the entry-point - ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``. - This may trigger errors when running the first egg_info in build_meta. - TODO: Remove this function in a version sufficiently > 68. 
- """ - - -# Export API used in entry_points -write_requirements = _requirestxt.write_requirements -write_setup_requirements = _requirestxt.write_setup_requirements - - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()] - ) - cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value) + '\n' - cmd.write_or_delete_file(argname, filename, value, force) - - -def write_entries(cmd, basename, filename): - eps = _entry_points.load(cmd.distribution.entry_points) - defn = _entry_points.render(eps) - cmd.write_or_delete_file('entry points', filename, defn, True) - - -def _egg_basename(egg_name, egg_version, py_version=None, platform=None): - """Compute filename of the output egg. Private API.""" - name = _normalization.filename_component(egg_name) - version = _normalization.filename_component(egg_version) - egg = f"{name}-{version}-py{py_version or PY_MAJOR}" - if platform: - egg += f"-{platform}" - return egg - - -class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): - """Deprecated behavior warning for EggInfo, bypassing suppression.""" +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +from distutils.filelist import FileList as _FileList +from distutils.errors import DistutilsInternalError +from distutils.util import convert_path +from distutils import log +import distutils.errors +import distutils.filelist +import functools +import os +import re +import sys +import time +import collections + +from .._importlib import metadata +from .. import _entry_points, _normalization +from . 
import _requirestxt + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.command.sdist import walk_revctrl +from setuptools.command.setopt import edit_config +from setuptools.command import bdist_egg +import setuptools.unicode_utils as unicode_utils +from setuptools.glob import glob + +from setuptools.extern import packaging # type: ignore[attr-defined] +from ..warnings import SetuptoolsDeprecationWarning + + +PY_MAJOR = '{}.{}'.format(*sys.version_info) + + +def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME + """ + Translate a file path glob like '*.txt' in to a regular expression. + This differs from fnmatch.translate which allows wildcards to match + directory separators. It also knows about '**/' which matches any number of + directories. + """ + pat = '' + + # This will split on '/' within [character classes]. This is deliberate. + chunks = glob.split(os.path.sep) + + sep = re.escape(os.sep) + valid_char = '[^%s]' % (sep,) + + for c, chunk in enumerate(chunks): + last_chunk = c == len(chunks) - 1 + + # Chunks that are a literal ** are globstars. They match anything. 
+ if chunk == '**': + if last_chunk: + # Match anything if this is the last component + pat += '.*' + else: + # Match '(name/)*' + pat += '(?:%s+%s)*' % (valid_char, sep) + continue # Break here as the whole path component has been handled + + # Find any special characters in the remainder + i = 0 + chunk_len = len(chunk) + while i < chunk_len: + char = chunk[i] + if char == '*': + # Match any number of name characters + pat += valid_char + '*' + elif char == '?': + # Match a name character + pat += valid_char + elif char == '[': + # Character class + inner_i = i + 1 + # Skip initial !/] chars + if inner_i < chunk_len and chunk[inner_i] == '!': + inner_i = inner_i + 1 + if inner_i < chunk_len and chunk[inner_i] == ']': + inner_i = inner_i + 1 + + # Loop till the closing ] is found + while inner_i < chunk_len and chunk[inner_i] != ']': + inner_i = inner_i + 1 + + if inner_i >= chunk_len: + # Got to the end of the string without finding a closing ] + # Do not treat this as a matching group, but as a literal [ + pat += re.escape(char) + else: + # Grab the insides of the [brackets] + inner = chunk[i + 1 : inner_i] + char_class = '' + + # Class negation + if inner[0] == '!': + char_class = '^' + inner = inner[1:] + + char_class += re.escape(inner) + pat += '[%s]' % (char_class,) + + # Skip to the end ] + i = inner_i + else: + pat += re.escape(char) + i += 1 + + # Join each chunk with the dir separator + if not last_chunk: + pat += sep + + pat += r'\Z' + return re.compile(pat, flags=re.MULTILINE | re.DOTALL) + + +class InfoCommon: + tag_build = None + tag_date = None + + @property + def name(self): + return _normalization.safe_name(self.distribution.get_name()) + + def tagged_version(self): + tagged = self._maybe_tag(self.distribution.get_version()) + return _normalization.best_effort_version(tagged) + + def _maybe_tag(self, version): + """ + egg_info may be called more than once for a distribution, + in which case the version string already contains all tags. 
+ """ + return ( + version + if self.vtags and self._already_tagged(version) + else version + self.vtags + ) + + def _already_tagged(self, version: str) -> bool: + # Depending on their format, tags may change with version normalization. + # So in addition the regular tags, we have to search for the normalized ones. + return version.endswith(self.vtags) or version.endswith(self._safe_tags()) + + def _safe_tags(self) -> str: + # To implement this we can rely on `safe_version` pretending to be version 0 + # followed by tags. Then we simply discard the starting 0 (fake version number) + return _normalization.best_effort_version(f"0{self.vtags}")[1:] + + def tags(self) -> str: + version = '' + if self.tag_build: + version += self.tag_build + if self.tag_date: + version += time.strftime("%Y%m%d") + return version + + vtags = property(tags) + + +class egg_info(InfoCommon, Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ( + 'egg-base=', + 'e', + "directory containing .egg-info directories" + " (default: top of the source tree)", + ), + ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date'] + negative_opt = { + 'no-date': 'tag-date', + } + + def initialize_options(self): + self.egg_base = None + self.egg_name = None + self.egg_info = None + self.egg_version = None + self.ignore_egg_info_in_manifest = False + + #################################### + # allow the 'tag_svn_revision' to be detected and + # set, supporting sdists built on older Setuptools. + @property + def tag_svn_revision(self): + pass + + @tag_svn_revision.setter + def tag_svn_revision(self, value): + pass + + #################################### + + def save_version_info(self, filename): + """ + Materialize the value of date into the + build tag. 
Install build keys in a deterministic order + to avoid arbitrary reordering on subsequent builds. + """ + egg_info = collections.OrderedDict() + # follow the order these keys would have been added + # when PYTHONHASHSEED=0 + egg_info['tag_build'] = self.tags() + egg_info['tag_date'] = 0 + edit_config(filename, dict(egg_info=egg_info)) + + def finalize_options(self): + # Note: we need to capture the current value returned + # by `self.tagged_version()`, so we can later update + # `self.distribution.metadata.version` without + # repercussions. + self.egg_name = self.name + self.egg_version = self.tagged_version() + parsed_version = packaging.version.Version(self.egg_version) + + try: + is_version = isinstance(parsed_version, packaging.version.Version) + spec = "%s==%s" if is_version else "%s===%s" + packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version)) + except ValueError as e: + raise distutils.errors.DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" + % (self.egg_name, self.egg_version) + ) from e + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('', os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) 
+ # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + key = getattr(pd, "key", None) or getattr(pd, "name", None) + if pd is not None and key == self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = packaging.version.Version(self.egg_version) + self.distribution._patched_dist = None + + def _get_egg_basename(self, py_version=PY_MAJOR, platform=None): + """Compute filename of the output egg. Private API.""" + return _egg_basename(self.egg_name, self.egg_version, py_version, platform) + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn("%s not set in setup(), but %s exists", what, filename) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. 
+ """ + log.info("writing %s to %s", what, filename) + data = data.encode("utf-8") + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def run(self): + self.mkpath(self.egg_info) + try: + os.utime(self.egg_info, None) + except OSError as e: + msg = f"Cannot update time stamp of directory '{self.egg_info}'" + raise distutils.errors.DistutilsFileError(msg) from e + for ep in metadata.entry_points(group='egg_info.writers'): + writer = ep.load() + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + +class FileList(_FileList): + # Implementations of the various MANIFEST.in commands + + def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False): + super().__init__(warn, debug_print) + self.ignore_egg_info_dir = ignore_egg_info_dir + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). 
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + action_map = { + 'include': self.include, + 'exclude': self.exclude, + 'global-include': self.global_include, + 'global-exclude': self.global_exclude, + 'recursive-include': functools.partial( + self.recursive_include, + dir, + ), + 'recursive-exclude': functools.partial( + self.recursive_exclude, + dir, + ), + 'graft': self.graft, + 'prune': self.prune, + } + log_map = { + 'include': "warning: no files found matching '%s'", + 'exclude': ("warning: no previously-included files found " "matching '%s'"), + 'global-include': ( + "warning: no files found matching '%s' " "anywhere in distribution" + ), + 'global-exclude': ( + "warning: no previously-included files matching " + "'%s' found anywhere in distribution" + ), + 'recursive-include': ( + "warning: no files found matching '%s' " "under directory '%s'" + ), + 'recursive-exclude': ( + "warning: no previously-included files matching " + "'%s' found under directory '%s'" + ), + 'graft': "warning: no directories found matching '%s'", + 'prune': "no previously-included directories found matching '%s'", + } + + try: + process_action = action_map[action] + except KeyError: + raise DistutilsInternalError( + "this cannot happen: invalid action '{action!s}'".format(action=action), + ) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. 
+ + action_is_recursive = action.startswith('recursive-') + if action in {'graft', 'prune'}: + patterns = [dir_pattern] + extra_log_args = (dir,) if action_is_recursive else () + log_tmpl = log_map[action] + + self.debug_print( + ' '.join( + [action] + ([dir] if action_is_recursive else []) + patterns, + ) + ) + for pattern in patterns: + if not process_action(pattern): + log.warn(log_tmpl, pattern, *extra_log_args) + + def _remove_files(self, predicate): + """ + Remove all files from the file list that match the predicate. + Return True if any matching files were removed + """ + found = False + for i in range(len(self.files) - 1, -1, -1): + if predicate(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + found = True + return found + + def include(self, pattern): + """Include files that match 'pattern'.""" + found = [f for f in glob(pattern) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def exclude(self, pattern): + """Exclude files that match 'pattern'.""" + match = translate_pattern(pattern) + return self._remove_files(match.match) + + def recursive_include(self, dir, pattern): + """ + Include all files anywhere in 'dir/' that match the pattern. + """ + full_pattern = os.path.join(dir, '**', pattern) + found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def recursive_exclude(self, dir, pattern): + """ + Exclude any file anywhere in 'dir/' that match the pattern. 
+ """ + match = translate_pattern(os.path.join(dir, '**', pattern)) + return self._remove_files(match.match) + + def graft(self, dir): + """Include all files from 'dir/'.""" + found = [ + item + for match_dir in glob(dir) + for item in distutils.filelist.findall(match_dir) + ] + self.extend(found) + return bool(found) + + def prune(self, dir): + """Filter out files from 'dir/'.""" + match = translate_pattern(os.path.join(dir, '**')) + return self._remove_files(match.match) + + def global_include(self, pattern): + """ + Include all files anywhere in the current directory that match the + pattern. This is very inefficient on large file trees. + """ + if self.allfiles is None: + self.findall() + match = translate_pattern(os.path.join('**', pattern)) + found = [f for f in self.allfiles if match.match(f)] + self.extend(found) + return bool(found) + + def global_exclude(self, pattern): + """ + Exclude all files anywhere that match the pattern. + """ + match = translate_pattern(os.path.join('**', pattern)) + return self._remove_files(match.match) + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. 
+ """ + self.files = list(filter(self._safe_path, self.files)) + + def _safe_path(self, path): + enc_warn = "'%s' not %s encodable -- skipping" + + # To avoid accidental trans-codings errors, first to unicode + u_path = unicode_utils.filesys_decode(path) + if u_path is None: + log.warn("'%s' in unexpected encoding -- skipping" % path) + return False + + # Must ensure utf-8 encodability + utf8_path = unicode_utils.try_encode(u_path, "utf-8") + if utf8_path is None: + log.warn(enc_warn, path, 'utf-8') + return False + + try: + # ignore egg-info paths + is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path + if self.ignore_egg_info_dir and is_egg_info: + return False + # accept is either way checks out + if os.path.exists(u_path) or os.path.exists(utf8_path): + return True + # this will catch any encode errors decoding u_path + except UnicodeEncodeError: + log.warn(enc_warn, path, sys.getfilesystemencoding()) + + +class manifest_maker(sdist): + template = "MANIFEST.in" + + def initialize_options(self): + self.use_defaults = 1 + self.prune = 1 + self.manifest_only = 1 + self.force_manifest = 1 + self.ignore_egg_info_dir = False + + def finalize_options(self): + pass + + def run(self): + self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir) + if not os.path.exists(self.manifest): + self.write_manifest() # it must exist so it'll get in the list + self.add_defaults() + if os.path.exists(self.template): + self.read_template() + self.add_license_files() + self._add_referenced_files() + self.prune_file_list() + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def _manifest_normalize(self, path): + path = unicode_utils.filesys_decode(path) + return path.replace(os.sep, '/') + + def write_manifest(self): + """ + Write the file list in 'self.filelist' to the manifest file + named by 'self.manifest'. 
+ """ + self.filelist._repair() + + # Now _repairs should encodability, but not unicode + files = [self._manifest_normalize(f) for f in self.filelist.files] + msg = "writing manifest file '%s'" % self.manifest + self.execute(write_file, (self.manifest, files), msg) + + def warn(self, msg): + if not self._should_suppress_warning(msg): + sdist.warn(self, msg) + + @staticmethod + def _should_suppress_warning(msg): + """ + suppress missing-file warnings from sdist + """ + return re.match(r"standard file .*not found", msg) + + def add_defaults(self): + sdist.add_defaults(self) + self.filelist.append(self.template) + self.filelist.append(self.manifest) + rcfiles = list(walk_revctrl()) + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + + if os.path.exists("setup.py"): + # setup.py should be included by default, even if it's not + # the script called to create the sdist + self.filelist.append("setup.py") + + ei_cmd = self.get_finalized_command('egg_info') + self.filelist.graft(ei_cmd.egg_info) + + def add_license_files(self): + license_files = self.distribution.metadata.license_files or [] + for lf in license_files: + log.info("adding license file '%s'", lf) + self.filelist.extend(license_files) + + def _add_referenced_files(self): + """Add files referenced by the config (e.g. 
`file:` directive) to filelist""" + referenced = getattr(self.distribution, '_referenced_files', []) + # ^-- fallback if dist comes from distutils or is a custom class + for rf in referenced: + log.debug("adding file referenced by config '%s'", rf) + self.filelist.extend(referenced) + + def prune_file_list(self): + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + self.filelist.prune(build.build_base) + self.filelist.prune(base_dir) + sep = re.escape(os.sep) + self.filelist.exclude_pattern( + r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1 + ) + + def _safe_data_files(self, build_py): + """ + The parent class implementation of this method + (``sdist``) will try to include data files, which + might cause recursion problems when + ``include_package_data=True``. + + Therefore, avoid triggering any attempt of + analyzing/building the manifest again. + """ + if hasattr(build_py, 'get_data_files_without_manifest'): + return build_py.get_data_files_without_manifest() + + SetuptoolsDeprecationWarning.emit( + "`build_py` command does not inherit from setuptools' `build_py`.", + """ + Custom 'build_py' does not implement 'get_data_files_without_manifest'. + Please extend command classes from setuptools instead of distutils. + """, + see_url="https://peps.python.org/pep-0632/", + # due_date not defined yet, old projects might still do it? + ) + return build_py.get_data_files() + + +def write_file(filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. 
+ """ + contents = "\n".join(contents) + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution, 'zip_safe', None) + + bdist_egg.write_safety_flag(cmd.egg_info, safe) + + +def warn_depends_obsolete(cmd, basename, filename): + """ + Unused: left to avoid errors when updating (from source) from <= 67.8. + Old installations have a .dist-info directory with the entry-point + ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``. + This may trigger errors when running the first egg_info in build_meta. + TODO: Remove this function in a version sufficiently > 68. 
+ """ + + +# Export API used in entry_points +write_requirements = _requirestxt.write_requirements +write_setup_requirements = _requirestxt.write_setup_requirements + + +def write_toplevel_names(cmd, basename, filename): + pkgs = dict.fromkeys( + [k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()] + ) + cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') + + +def overwrite_arg(cmd, basename, filename): + write_arg(cmd, basename, filename, True) + + +def write_arg(cmd, basename, filename, force=False): + argname = os.path.splitext(basename)[0] + value = getattr(cmd.distribution, argname, None) + if value is not None: + value = '\n'.join(value) + '\n' + cmd.write_or_delete_file(argname, filename, value, force) + + +def write_entries(cmd, basename, filename): + eps = _entry_points.load(cmd.distribution.entry_points) + defn = _entry_points.render(eps) + cmd.write_or_delete_file('entry points', filename, defn, True) + + +def _egg_basename(egg_name, egg_version, py_version=None, platform=None): + """Compute filename of the output egg. 
Private API.""" + name = _normalization.filename_component(egg_name) + version = _normalization.filename_component(egg_version) + egg = f"{name}-{version}-py{py_version or PY_MAJOR}" + if platform: + egg += f"-{platform}" + return egg + + +class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): + """Deprecated behavior warning for EggInfo, bypassing suppression.""" From dc946aa6b0c7cf8b799c1f680219bc627d3c6581 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 27 Oct 2023 15:34:52 -0400 Subject: [PATCH 07/25] Update .gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index f06484974d..90ae80505e 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ lib distribute.egg-info setuptools.egg-info .coverage -test_* .eggs .tox .venv From f0697b484c9c89dc0b54c66269b6f700050b1221 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 19:44:46 -0500 Subject: [PATCH 08/25] No unused type: ignore --- mypy.ini | 5 ++--- pkg_resources/__init__.py | 3 +-- setuptools/config/pyprojecttoml.py | 2 +- setuptools/msvc.py | 16 +++++++++------- setuptools/tests/_packaging_compat.py | 2 +- setuptools/tests/test_bdist_egg.py | 2 +- setuptools/tests/test_editable_install.py | 2 +- 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/mypy.ini b/mypy.ini index c01d06a310..b007c27bf2 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2,6 +2,7 @@ # CI should test for all versions, local development gets hints for oldest supported python_version = 3.8 strict = False +warn_unused_ignores = True # TODO: Not all dependencies are typed. 
setuptools itself should be typed too # TODO: Test environment is not yet properly configured to install all imported packages ignore_missing_imports = True @@ -16,7 +17,5 @@ exclude = (?x)( ) # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 -[mypy-pkg_resources.extern.*] -ignore_missing_imports = True -[mypy-setuptools.extern.*] +[mypy-pkg_resources.extern.*,setuptools.extern.*] ignore_missing_imports = True diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3ac511f072..a82073479f 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2227,8 +2227,7 @@ def resolve_egg_link(path): register_finder(pkgutil.ImpImporter, find_on_path) # TODO: If importlib_machinery import fails, this will also fail. This should be fixed. -# https://github.com/pypa/setuptools/pull/3979/files#r1367959803 -register_finder(importlib_machinery.FileFinder, find_on_path) # type: ignore[no-untyped-call] +register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index a023365a69..67ca6d29a3 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -271,7 +271,7 @@ def _ensure_previously_set(self, dist: "Distribution", field: str): def _expand_directive( self, specifier: str, directive, package_dir: Mapping[str, str] ): - from setuptools.extern.more_itertools import always_iterable # type: ignore + from setuptools.extern.more_itertools import always_iterable with _ignore_errors(self.ignore_option_errors): root_dir = self.root_dir diff --git a/setuptools/msvc.py b/setuptools/msvc.py index f8d0510065..1e11a73be1 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -21,21 +21,23 @@ import itertools import subprocess import distutils.errors +from typing import Dict, TYPE_CHECKING from setuptools.extern.more_itertools 
import unique_everseen -if platform.system() == 'Windows': +# https://github.com/python/mypy/issues/8166 +if not TYPE_CHECKING and platform.system() == 'Windows': import winreg from os import environ else: # Mock winreg and environ so the module can be imported on this platform. - class winreg: # type: ignore[no-redef] # https://github.com/python/mypy/issues/8166 + class winreg: HKEY_USERS = None HKEY_CURRENT_USER = None HKEY_LOCAL_MACHINE = None HKEY_CLASSES_ROOT = None - environ = dict() # type: ignore[assignment] # https://github.com/python/mypy/issues/8166 + environ: Dict[str, str] = dict() def _msvc14_find_vc2015(): @@ -407,10 +409,10 @@ class RegistryInfo: """ HKEYS = ( - winreg.HKEY_USERS, # type: ignore[attr-defined] - winreg.HKEY_CURRENT_USER, # type: ignore[attr-defined] - winreg.HKEY_LOCAL_MACHINE, # type: ignore[attr-defined] - winreg.HKEY_CLASSES_ROOT, # type: ignore[attr-defined] + winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT, ) def __init__(self, platform_info): diff --git a/setuptools/tests/_packaging_compat.py b/setuptools/tests/_packaging_compat.py index 5d48634ed8..a34b261d72 100644 --- a/setuptools/tests/_packaging_compat.py +++ b/setuptools/tests/_packaging_compat.py @@ -3,7 +3,7 @@ from packaging import __version__ as packaging_version if TYPE_CHECKING or tuple(packaging_version.split(".")) >= ("23", "2"): - from packaging.metadata import Metadata # type: ignore[attr-defined] + from packaging.metadata import Metadata else: # Just pretend it exists while waiting for release... 
from unittest.mock import MagicMock diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 85c5043500..45dd070967 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE'), # type: ignore[arg-type] # https://github.com/pytest-dev/pytest/issues/10094 + os.environ.get('PYTHONDONTWRITEBYTECODE'), reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index eeffcf1962..f09c00b66c 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -125,7 +125,7 @@ def editable_opts(request): @pytest.mark.parametrize( "files", [ - {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB}, # type: ignore + {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB}, EXAMPLE, # No setup.py script ], ) From baa555bd004329672e5f1dc82a8630ade6f3b679 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 20:00:12 -0500 Subject: [PATCH 09/25] TypeError: 'ABCMeta' object is not subscriptable --- setuptools/command/easy_install.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 16f8cdf739..104b0f0147 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -79,7 +79,8 @@ from .._path import ensure_directory from ..extern.jaraco.text import yield_lines -_FileDescriptorOrPath = Union[int, str, bytes, os.PathLike[str], os.PathLike[bytes]] +if TYPE_CHECKING: + _FileDescriptorOrPath = Union[int, str, bytes, os.PathLike[str], os.PathLike[bytes]] # Turn on PEP440Warnings @@ -2028,7 +2029,7 @@ def is_python_script(script_text, filename): except 
ImportError: # Jython compatibility def _chmod( - path: _FileDescriptorOrPath, + path: "_FileDescriptorOrPath", mode: int, *, dir_fd: Optional[int] = None, From c367b9fa362c25a93f1dd935e05b0c3293e23d33 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 20:26:54 -0500 Subject: [PATCH 10/25] Fix RuffError --- pkg_resources/__init__.py | 2 +- setuptools/dist.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index a82073479f..d2ca406d29 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -23,7 +23,7 @@ import time import re import types -from typing import TYPE_CHECKING, Optional +from typing import Optional import zipfile import zipimport import warnings diff --git a/setuptools/dist.py b/setuptools/dist.py index 5d65c1f50d..bb9e051ce6 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -10,7 +10,7 @@ from contextlib import suppress from glob import iglob from pathlib import Path -from typing import Dict, List, MutableMapping, Optional, Sequence, Set, Tuple +from typing import Dict, List, MutableMapping, Optional, Set, Tuple import distutils.cmd import distutils.command From f76def799e7c6c4092e26c6f1e286fdd7e34a339 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 24 Jan 2024 12:45:16 -0500 Subject: [PATCH 11/25] Fix post-merge mypy issues --- setuptools/command/editable_wheel.py | 2 +- setuptools/py311compat.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 76b71d7222..51bad29858 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -600,7 +600,7 @@ def _simple_layout( layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages} if not layout: return set(package_dir) in ({}, {""}) - parent = os.path.commonpath(starmap(_parent_path, layout.items())) + parent = 
os.path.commonpath(starmap(_parent_path, layout.items())) # type: ignore[call-overload] # FIXME upstream return all( _path.same_path(Path(parent, *key.split('.')), value) for key, value in layout.items() diff --git a/setuptools/py311compat.py b/setuptools/py311compat.py index 9231cbb290..0171871589 100644 --- a/setuptools/py311compat.py +++ b/setuptools/py311compat.py @@ -4,4 +4,4 @@ if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover - from setuptools.extern import tomli as tomllib + from setuptools.extern import tomli as tomllib # type: ignore[attr-defined] From 8fbe1bff8ea8b1787fdf13b338c1029b3c834952 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 14 Feb 2024 13:24:22 -0500 Subject: [PATCH 12/25] RUff format --- setuptools/command/build_ext.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 8c96292286..ca6adf859f 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -382,7 +382,10 @@ def _compile_and_remove_stub(self, stub_file: str): optimize = self.get_finalized_command('install_lib').optimize # type: ignore[attr-defined] # TODO: Fix in distutils stubs if optimize > 0: byte_compile( - [stub_file], optimize=optimize, force=True, dry_run=self.dry_run # type: ignore[attr-defined] # TODO: Fix in distutils stubs + [stub_file], + optimize=optimize, + force=True, + dry_run=self.dry_run, # type: ignore[attr-defined] # TODO: Fix in distutils stubs ) if os.path.exists(stub_file) and not self.dry_run: # type: ignore[attr-defined] # TODO: Fix in distutils stubs os.unlink(stub_file) From 83c2b3d3f26fa240a13c6aed2746119f96f7e556 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 15 Feb 2024 18:10:17 -0500 Subject: [PATCH 13/25] Ignore more generated files --- .gitignore | 1 + mypy.ini | 1 + setuptools/config/_validate_pyproject/__init__.py | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore 
index 90ae80505e..35dad3f8cc 100644 --- a/.gitignore +++ b/.gitignore @@ -18,5 +18,6 @@ setuptools.egg-info .hg* .cache .idea/ +.vscode/ .pytest_cache/ .mypy_cache/ diff --git a/mypy.ini b/mypy.ini index b007c27bf2..2d6ace85c2 100644 --- a/mypy.ini +++ b/mypy.ini @@ -14,6 +14,7 @@ exclude = (?x)( | ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name | ^.+?/(_vendor|extern)/ # Vendored | ^setuptools/_distutils/ # Vendored + | ^setuptools/config/_validate_pyproject/ # Auto-generated ) # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py index cddc1599ac..dbe6cb4ca4 100644 --- a/setuptools/config/_validate_pyproject/__init__.py +++ b/setuptools/config/_validate_pyproject/__init__.py @@ -5,7 +5,7 @@ from .error_reporting import detailed_errors, ValidationError from .extra_validations import EXTRA_VALIDATIONS from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException -from .fastjsonschema_validations import validate as _validate # type: ignore[attr-defined] # mypy false-positive. 
Pyright is fine here +from .fastjsonschema_validations import validate as _validate __all__ = [ "validate", From f4e1d215ad28985b13dcf7f3b401c8a1ea9fec9c Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 19 Feb 2024 00:49:14 -0500 Subject: [PATCH 14/25] Disable more mypy errors --- mypy.ini | 4 ++++ setuptools/command/build_ext.py | 1 + setuptools/tests/test_bdist_egg.py | 2 +- 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index 2d6ace85c2..65fa60bc7a 100644 --- a/mypy.ini +++ b/mypy.ini @@ -20,3 +20,7 @@ exclude = (?x)( # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 [mypy-pkg_resources.extern.*,setuptools.extern.*] ignore_missing_imports = True + +# This should already be exluded, likely this issue: https://github.com/python/mypy/issues/10946 +[mypy-setuptools.config._validate_pyproject] +disable_error_code = attr-defined diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index ca6adf859f..668255d791 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -85,6 +85,7 @@ def get_abi3_suffix(): class build_ext(_build_ext): editable_mode: bool = False inplace: bool = False + # override distutils.dist.Distribution with setuptools' distribution: "Distribution" def run(self): diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 0e473d168b..4d18e3214b 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE'), + os.environ.get('PYTHONDONTWRITEBYTECODE'), # type: ignore[arg-type] # Unintended usage of xfail reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): From 6b5bbcacf18fc1f254aced19cf94bfd543ac8f36 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 19 Feb 
2024 16:37:21 -0500 Subject: [PATCH 15/25] Globally ignore attr-defined for now --- mypy.ini | 9 +++++---- pkg_resources/__init__.py | 4 ++-- pkg_resources/tests/test_resources.py | 2 +- setuptools/_importlib.py | 4 ++-- setuptools/_normalization.py | 2 +- setuptools/command/build_ext.py | 12 ++++++------ setuptools/command/build_py.py | 2 +- setuptools/command/easy_install.py | 1 + setuptools/command/egg_info.py | 2 +- setuptools/compat/py310.py | 2 +- setuptools/dist.py | 2 +- 11 files changed, 22 insertions(+), 20 deletions(-) diff --git a/mypy.ini b/mypy.ini index 65fa60bc7a..33dd7163ff 100644 --- a/mypy.ini +++ b/mypy.ini @@ -16,11 +16,12 @@ exclude = (?x)( | ^setuptools/_distutils/ # Vendored | ^setuptools/config/_validate_pyproject/ # Auto-generated ) +# Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, +# w/o updating all the attributes and return types from the base classes for type-checkers to understand +# Especially with setuptools.dist.command vs distutils.dist.command vs setuptoos._distutils.dist.command +# *.extern modules that actually live in *._vendor will also cause attr-defined issues on import +disable_error_code = attr-defined # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 [mypy-pkg_resources.extern.*,setuptools.extern.*] ignore_missing_imports = True - -# This should already be exluded, likely this issue: https://github.com/python/mypy/issues/10946 -[mypy-setuptools.config._validate_pyproject] -disable_error_code = attr-defined diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index e8812fed4a..10c6a9cd06 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -71,8 +71,8 @@ join_continuation, ) -from pkg_resources.extern import platformdirs # type: ignore[attr-defined] -from pkg_resources.extern import packaging # type: ignore[attr-defined] +from pkg_resources.extern import platformdirs +from pkg_resources.extern import packaging 
__import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index ea296a56b4..5b2308aea7 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -5,7 +5,7 @@ import itertools import pytest -from pkg_resources.extern import packaging # type: ignore[attr-defined] +from pkg_resources.extern import packaging import pkg_resources from pkg_resources import ( diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py index c8344b7cf0..bd2b01e2b5 100644 --- a/setuptools/_importlib.py +++ b/setuptools/_importlib.py @@ -38,7 +38,7 @@ def disable_importlib_metadata_finder(metadata): if sys.version_info < (3, 10): - from setuptools.extern import importlib_metadata as metadata # type: ignore[attr-defined] + from setuptools.extern import importlib_metadata as metadata disable_importlib_metadata_finder(metadata) else: @@ -46,6 +46,6 @@ def disable_importlib_metadata_finder(metadata): if sys.version_info < (3, 9): - from setuptools.extern import importlib_resources as resources # type: ignore[attr-defined] + from setuptools.extern import importlib_resources as resources else: import importlib.resources as resources # noqa: F401 diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index e8acbaeaff..dd2c5982d8 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -5,7 +5,7 @@ import re -from .extern import packaging # type: ignore[attr-defined] +from .extern import packaging # https://packaging.python.org/en/latest/specifications/core-metadata/#name diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 668255d791..41bf202998 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -133,7 +133,7 @@ def _get_output_mapping(self) -> Iterator[Tuple[str, str]]: return build_py = 
self.get_finalized_command('build_py') - opt = self.get_finalized_command('install_lib').optimize or "" # type: ignore[attr-defined] # TODO: Fix in distutils stubs + opt = self.get_finalized_command('install_lib').optimize or "" for ext in self.extensions: inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext) @@ -344,7 +344,7 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): log.info("writing stub loader for %s to %s", ext._full_name, stub_file) if compile and os.path.exists(stub_file): raise BaseError(stub_file + " already exists! Please delete.") - if not self.dry_run: # type: ignore[attr-defined] # TODO: Fix in distutils stubs + if not self.dry_run: f = open(stub_file, 'w') f.write( '\n'.join([ @@ -379,16 +379,16 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): def _compile_and_remove_stub(self, stub_file: str): from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) # type: ignore[attr-defined] # TODO: Fix in distutils stubs - optimize = self.get_finalized_command('install_lib').optimize # type: ignore[attr-defined] # TODO: Fix in distutils stubs + byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize if optimize > 0: byte_compile( [stub_file], optimize=optimize, force=True, - dry_run=self.dry_run, # type: ignore[attr-defined] # TODO: Fix in distutils stubs + dry_run=self.dry_run, ) - if os.path.exists(stub_file) and not self.dry_run: # type: ignore[attr-defined] # TODO: Fix in distutils stubs + if os.path.exists(stub_file) and not self.dry_run: os.unlink(stub_file) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 4861a9a366..3f40b060b3 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -218,7 +218,7 @@ def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[s 
This function should filter this case of invalid files out. """ build = self.get_finalized_command("build") - build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base) # type: ignore[attr-defined] # TODO: Fix in distutils stubs + build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base) norm_dirs = [os.path.normpath(p) for p in build_dirs if p] for file in files: diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 8c1957482b..e253cdf9ba 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -80,6 +80,7 @@ from ..extern.jaraco.text import yield_lines if TYPE_CHECKING: + # Same as _typeshed.FileDescriptorOrPath _FileDescriptorOrPath = Union[int, str, bytes, os.PathLike[str], os.PathLike[bytes]] diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index ac65c1acd9..62d2feea9b 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -27,7 +27,7 @@ import setuptools.unicode_utils as unicode_utils from setuptools.glob import glob -from setuptools.extern import packaging # type: ignore[attr-defined] +from setuptools.extern import packaging from ..warnings import SetuptoolsDeprecationWarning diff --git a/setuptools/compat/py310.py b/setuptools/compat/py310.py index 5a17b9910c..f7d53d6de9 100644 --- a/setuptools/compat/py310.py +++ b/setuptools/compat/py310.py @@ -7,4 +7,4 @@ if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover - from setuptools.extern import tomli as tomllib # type: ignore[attr-defined] + from setuptools.extern import tomli as tomllib diff --git a/setuptools/dist.py b/setuptools/dist.py index 7ab8c26dc3..9eed330c51 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -295,7 +295,7 @@ def __init__(self, attrs: Optional[MutableMapping] = None) -> None: self.dependency_links = attrs.pop('dependency_links', []) self.setup_requires = attrs.pop('setup_requires', []) for ep in 
metadata.entry_points(group='distutils.setup_keywords'): - vars(self).setdefault(ep.name, None) # type: ignore[attr-defined] # https://github.com/python/mypy/issues/14458 + vars(self).setdefault(ep.name, None) metadata_only = set(self._DISTUTILS_UNSUPPORTED_METADATA) metadata_only -= {"install_requires", "extras_require"} From ecac670d25bf2a12b4c4c1762b836178caf89cbb Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 19 Feb 2024 16:52:31 -0500 Subject: [PATCH 16/25] Update more comments --- setuptools/command/build_ext.py | 3 --- setuptools/sandbox.py | 8 ++++---- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 41bf202998..03902fb9cd 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -14,7 +14,6 @@ from setuptools.extension import Extension, Library if TYPE_CHECKING: - from setuptools.dist import Distribution from distutils.command.build_ext import build_ext as _build_ext else: try: @@ -85,8 +84,6 @@ def get_abi3_suffix(): class build_ext(_build_ext): editable_mode: bool = False inplace: bool = False - # override distutils.dist.Distribution with setuptools' - distribution: "Distribution" def run(self): """Build extensions in build directory, then copy if --inplace""" diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index a7a642e7c8..6c095e029e 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -299,7 +299,7 @@ def run(self, func): with self: return func() - def _mk_dual_path_wrapper(name: str): # type: ignore[misc] # TODO: Extract or make static + def _mk_dual_path_wrapper(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, src, dst, *args, **kw): @@ -313,7 +313,7 @@ def wrap(self, src, dst, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_dual_path_wrapper(name) - def _mk_single_path_wrapper(name: str, original=None): # type: ignore[misc] # 
TODO: Extract or make static + def _mk_single_path_wrapper(name: str, original=None): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = original or getattr(_os, name) def wrap(self, path, *args, **kw): @@ -350,7 +350,7 @@ def wrap(self, path, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_single_path_wrapper(name) - def _mk_single_with_return(name: str): # type: ignore[misc] # TODO: Extract or make static + def _mk_single_with_return(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, path, *args, **kw): @@ -365,7 +365,7 @@ def wrap(self, path, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_single_with_return(name) - def _mk_query(name: str): # type: ignore[misc] # TODO: Extract or make static + def _mk_query(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, *args, **kw): From f3779b15ea638b5f9d4c29c0b06bfaae4c2a8f0b Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 21 Feb 2024 23:25:21 -0500 Subject: [PATCH 17/25] Address PR comments and fix new exposed typing issues --- .gitignore | 1 - mypy.ini | 24 ++++++++----- setuptools/__init__.py | 13 ++++--- setuptools/_vendor/packaging/_manylinux.py | 2 +- setuptools/command/build_ext.py | 26 +++++++------- setuptools/command/dist_info.py | 4 ++- setuptools/command/easy_install.py | 11 ++---- setuptools/command/editable_wheel.py | 40 ++++++++++++++++------ setuptools/command/install.py | 5 ++- setuptools/command/upload_docs.py | 2 +- setuptools/config/_apply_pyprojecttoml.py | 3 +- setuptools/dist.py | 10 ++++-- setuptools/extension.py | 10 ++++-- setuptools/tests/test_bdist_egg.py | 2 +- setuptools/tests/test_egg_info.py | 2 +- 15 files changed, 95 insertions(+), 60 deletions(-) diff --git a/.gitignore b/.gitignore index 35dad3f8cc..90ae80505e 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,5 @@ setuptools.egg-info .hg* 
.cache .idea/ -.vscode/ .pytest_cache/ .mypy_cache/ diff --git a/mypy.ini b/mypy.ini index 33dd7163ff..7df314a5ae 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3,9 +3,6 @@ python_version = 3.8 strict = False warn_unused_ignores = True -# TODO: Not all dependencies are typed. setuptools itself should be typed too -# TODO: Test environment is not yet properly configured to install all imported packages -ignore_missing_imports = True # required to support namespace packages: https://github.com/python/mypy/issues/14057 explicit_package_bases = True exclude = (?x)( @@ -16,12 +13,23 @@ exclude = (?x)( | ^setuptools/_distutils/ # Vendored | ^setuptools/config/_validate_pyproject/ # Auto-generated ) -# Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, -# w/o updating all the attributes and return types from the base classes for type-checkers to understand -# Especially with setuptools.dist.command vs distutils.dist.command vs setuptoos._distutils.dist.command -# *.extern modules that actually live in *._vendor will also cause attr-defined issues on import -disable_error_code = attr-defined +disable_error_code = + # TODO: Test environment is not yet properly configured to install all imported packages + # import-not-found, # This can be left commented out for local runs until we enforce running mypy in the CI + # TODO: Not all dependencies are typed. 
Nanely: distutils._modified, wheel.wheelfile, and jaraco.* + import-untyped, + # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, + # w/o updating all the attributes and return types from the base classes for type-checkers to understand + # Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command + # *.extern modules that actually live in *._vendor will also cause attr-defined issues on import + attr-defined +# Avoid raising issues when importing from "extern" modules, as those are added to path dynamically. # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 [mypy-pkg_resources.extern.*,setuptools.extern.*] ignore_missing_imports = True + +# Tests include creating dynamic modules that won't exists statically before the test is run. +# Let's ignore all "import-not-found", as if an import really wasn't found, then the test would fail. +[mypy-pkg_resources.tests.*,setuptools.tests.*] +disable_error_code = import-not-found, import-untyped diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 7973eabb4b..7c88c7e19b 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -3,6 +3,7 @@ import functools import os import re +from typing import TYPE_CHECKING import _distutils_hack.override # noqa: F401 import distutils.core @@ -105,11 +106,14 @@ def setup(**attrs): setup.__doc__ = distutils.core.setup.__doc__ +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Command = distutils.core.Command +else: + _Command = monkey.get_unpatched(distutils.core.Command) -_Command = monkey.get_unpatched(distutils.core.Command) - -class Command(_Command): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 +class Command(_Command): """ Setuptools internal actions are organized using a *command design pattern*. 
This means that each action (or group of closely related actions) executed during @@ -165,8 +169,9 @@ class Command(_Command): # type: ignore[valid-type, misc] # https://github.com """ command_consumes_arguments = False + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution - def __init__(self, dist, **kw): + def __init__(self, dist: Distribution, **kw): """ Construct the command for dist, updating vars(self) with any keyword parameters. diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py index 449c655be6..cd868ddcea 100644 --- a/setuptools/_vendor/packaging/_manylinux.py +++ b/setuptools/_vendor/packaging/_manylinux.py @@ -173,7 +173,7 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: return False # Check for presence of _manylinux module. try: - import _manylinux # noqa + import _manylinux # type: ignore[import-not-found] # noqa # Expected could be missing except ImportError: return True if hasattr(_manylinux, "manylinux_compatible"): diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 03902fb9cd..982536c6a8 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -3,9 +3,10 @@ import itertools from importlib.machinery import EXTENSION_SUFFIXES from importlib.util import cache_from_source as _compiled_file_name -from typing import TYPE_CHECKING, Dict, Iterator, List, Tuple +from typing import Dict, Iterator, List, Tuple from pathlib import Path +from distutils.command.build_ext import build_ext as _du_build_ext from distutils.ccompiler import new_compiler from distutils.sysconfig import customize_compiler, get_config_var from distutils import log @@ -13,22 +14,19 @@ from setuptools.errors import BaseError from setuptools.extension import Extension, Library -if TYPE_CHECKING: - from distutils.command.build_ext import build_ext as _build_ext -else: - try: - # Attempt to use Cython 
for building extensions, if available - from Cython.Distutils.build_ext import build_ext as _build_ext +try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext - # Additionally, assert that the compiler module will load - # also. Ref #1229. - __import__('Cython.Compiler.Main') - except ImportError: - from distutils.command.build_ext import build_ext as _build_ext + # Additionally, assert that the compiler module will load + # also. Ref #1229. + __import__('Cython.Compiler.Main') +except ImportError: + _build_ext = _du_build_ext # make sure _config_vars is initialized get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa # Not publicly exposed in distutils stubs +from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa # Not publicly exposed in typeshed distutils stubs def _customize_compiler_for_shlib(compiler): @@ -60,7 +58,7 @@ def _customize_compiler_for_shlib(compiler): use_stubs = True elif os.name != 'nt': try: - import dl + import dl # type: ignore[import-not-found] # https://github.com/python/mypy/issues/13002 use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') except ImportError: diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index f5061afaaf..52c0721903 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -9,8 +9,10 @@ from distutils import log from distutils.core import Command from pathlib import Path +from typing import cast from .. 
import _normalization +from .egg_info import egg_info as egg_info_cls class dist_info(Command): @@ -50,7 +52,7 @@ def finalize_options(self): project_dir = dist.src_root or os.curdir self.output_dir = Path(self.output_dir or project_dir) - egg_info = self.reinitialize_command("egg_info") + egg_info = cast(egg_info_cls, self.reinitialize_command("egg_info")) egg_info.egg_base = str(self.output_dir) if self.tag_date: diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index e253cdf9ba..d0ce62adc2 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -25,7 +25,7 @@ from distutils.command import install import sys import os -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Dict, List, Union import zipimport import shutil import tempfile @@ -44,7 +44,6 @@ import configparser import sysconfig - from sysconfig import get_path from setuptools import Command @@ -2020,13 +2019,7 @@ def is_python_script(script_text, filename): from os import chmod as _chmod except ImportError: # Jython compatibility - def _chmod( - path: "_FileDescriptorOrPath", - mode: int, - *, - dir_fd: Optional[int] = None, - follow_symlinks: bool = True, - ) -> None: + def _chmod(*args: object, **kwargs: object) -> None: # type: ignore[misc] # Mypy re-uses the imported definition anyway pass diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 51bad29858..894acd4843 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -34,6 +34,7 @@ Tuple, TypeVar, Union, + cast, ) from .. 
import ( @@ -50,7 +51,12 @@ SetuptoolsDeprecationWarning, SetuptoolsWarning, ) +from .build import build as build_cls from .build_py import build_py as build_py_cls +from .dist_info import dist_info as dist_info_cls +from .egg_info import egg_info as egg_info_cls +from .install import install as install_cls +from .install_scripts import install_scripts as install_scripts_cls if TYPE_CHECKING: from wheel.wheelfile import WheelFile # noqa @@ -156,7 +162,7 @@ def run(self): def _ensure_dist_info(self): if self.dist_info_dir is None: - dist_info = self.reinitialize_command("dist_info") + dist_info = cast(dist_info_cls, self.reinitialize_command("dist_info")) dist_info.output_dir = self.dist_dir dist_info.ensure_finalized() dist_info.run() @@ -203,12 +209,18 @@ def _configure_build( scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts")) # egg-info may be generated again to create a manifest (used for package data) - egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True) + egg_info = cast( + egg_info_cls, dist.reinitialize_command("egg_info", reinit_subcommands=True) + ) egg_info.egg_base = str(tmp_dir) egg_info.ignore_egg_info_in_manifest = True - build = dist.reinitialize_command("build", reinit_subcommands=True) - install = dist.reinitialize_command("install", reinit_subcommands=True) + build = cast( + build_cls, dist.reinitialize_command("build", reinit_subcommands=True) + ) + install = cast( + install_cls, dist.reinitialize_command("install", reinit_subcommands=True) + ) build.build_platlib = build.build_purelib = build.build_lib = build_lib install.install_purelib = install.install_platlib = install.install_lib = wheel @@ -216,12 +228,14 @@ def _configure_build( install.install_headers = headers install.install_data = data - install_scripts = dist.get_command_obj("install_scripts") + install_scripts = cast( + install_scripts_cls, dist.get_command_obj("install_scripts") + ) install_scripts.no_ep = True build.build_temp = str(tmp_dir) - 
build_py = dist.get_command_obj("build_py") + build_py = cast(build_py_cls, dist.get_command_obj("build_py")) build_py.compile = False build_py.existing_egg_info_dir = self._find_egg_info_dir() @@ -233,7 +247,7 @@ def _configure_build( def _set_editable_mode(self): """Set the ``editable_mode`` flag in the build sub-commands""" dist = self.distribution - build = dist.get_command_obj("build") + build = cast(build_cls, dist.get_command_obj("build")) for cmd_name in build.get_sub_commands(): cmd = dist.get_command_obj(cmd_name) if hasattr(cmd, "editable_mode"): @@ -280,7 +294,7 @@ def _run_build_subcommands(self) -> None: # TODO: Once plugins/customisations had the chance to catch up, replace # `self._run_build_subcommands()` with `self.run_command("build")`. # Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023. - build: Command = self.get_finalized_command("build") + build = self.get_finalized_command("build") for name in build.get_sub_commands(): cmd = self.get_finalized_command(name) if name == "build_py" and type(cmd) != build_py_cls: @@ -429,7 +443,8 @@ def __init__( ): self.auxiliary_dir = Path(auxiliary_dir) self.build_lib = Path(build_lib).resolve() - self._file = dist.get_command_obj("build_py").copy_file + # TODO: Update typeshed distutils stubs to overload non-None return type by default + self._file = dist.get_command_obj("build_py").copy_file # type: ignore[union-attr] super().__init__(dist, name, [self.auxiliary_dir]) def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): @@ -447,7 +462,9 @@ def _create_file(self, relative_output: str, src_file: str, link=None): dest = self.auxiliary_dir / relative_output if not dest.parent.is_dir(): dest.parent.mkdir(parents=True) - self._file(src_file, dest, link=link) + # TODO: Update typeshed distutils stubs so distutils.cmd.Command.copy_file, accepts PathLike + # same with methods used by copy_file + self._file(src_file, dest, link=link) # type: ignore[arg-type] def 
_create_links(self, outputs, output_mapping): self.auxiliary_dir.mkdir(parents=True, exist_ok=True) @@ -600,7 +617,8 @@ def _simple_layout( layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages} if not layout: return set(package_dir) in ({}, {""}) - parent = os.path.commonpath(starmap(_parent_path, layout.items())) # type: ignore[call-overload] # FIXME upstream + # TODO: has been fixed upstream, waiting for new mypy release https://github.com/python/typeshed/pull/11310 + parent = os.path.commonpath(starmap(_parent_path, layout.items())) # type: ignore[call-overload] return all( _path.same_path(Path(parent, *key.split('.')), value) for key, value in layout.items() diff --git a/setuptools/command/install.py b/setuptools/command/install.py index b97a9b4713..56c1155b50 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -3,9 +3,11 @@ import glob import platform import distutils.command.install as orig +from typing import cast import setuptools from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning +from .bdist_egg import bdist_egg as bdist_egg_cls # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for # now. 
See https://github.com/pypa/setuptools/issues/199/ @@ -135,7 +137,8 @@ def do_egg_install(self): cmd.package_index.scan(glob.glob('*.egg')) self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] + bdist_egg = cast(bdist_egg_cls, self.distribution.get_command_obj('bdist_egg')) + args = [bdist_egg.egg_output] if setuptools.bootstrap_install_from: # Bootstrap self-installation of setuptools diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 0c24f7aef4..3fbbb62553 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -50,7 +50,7 @@ def has_sphinx(self): and metadata.entry_points(group='distutils.commands', name='build_sphinx') ) - sub_commands = [('build_sphinx', has_sphinx)] # type: ignore[list-item] # TODO: Fix in distutils stubs + sub_commands = [('build_sphinx', has_sphinx)] # type: ignore[list-item] # TODO: Fix in typeshed distutils stubs def initialize_options(self): upload.initialize_options(self) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 32fb00131e..0061f1b60e 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -34,6 +34,7 @@ from ..warnings import SetuptoolsWarning if TYPE_CHECKING: + from distutils.dist import _OptionsList from setuptools._importlib import metadata # noqa from setuptools.dist import Distribution # noqa @@ -294,7 +295,7 @@ def _normalise_cmd_option_key(name: str) -> str: return json_compatible_key(name).strip("_=") -def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]: +def _normalise_cmd_options(desc: "_OptionsList") -> Set[str]: return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc} diff --git a/setuptools/dist.py b/setuptools/dist.py index 9eed330c51..6350e38100 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -10,7 +10,7 @@ from contextlib import 
suppress from glob import iglob from pathlib import Path -from typing import Dict, List, MutableMapping, Optional, Set, Tuple +from typing import TYPE_CHECKING, Dict, List, MutableMapping, Optional, Set, Tuple import distutils.cmd import distutils.command @@ -202,10 +202,14 @@ def check_packages(dist, attr, value): ) -_Distribution = get_unpatched(distutils.core.Distribution) +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Distribution = distutils.core.Distribution +else: + _Distribution = get_unpatched(distutils.core.Distribution) -class Distribution(_Distribution): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 +class Distribution(_Distribution): """Distribution with support for tests and package data This is an enhanced version of 'distutils.dist.Distribution' that diff --git a/setuptools/extension.py b/setuptools/extension.py index 9eea0d6930..8caad78d4b 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -3,6 +3,7 @@ import distutils.core import distutils.errors import distutils.extension +from typing import TYPE_CHECKING from .monkey import get_unpatched @@ -23,11 +24,14 @@ def _have_cython(): # for compatibility have_pyrex = _have_cython +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Extension = distutils.core.Extension +else: + _Extension = get_unpatched(distutils.core.Extension) -_Extension = get_unpatched(distutils.core.Extension) - -class Extension(_Extension): # type: ignore[valid-type, misc] # https://github.com/python/mypy/issues/14458 +class Extension(_Extension): """ Describes a single extension module. 
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 4d18e3214b..12ed4d328c 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE'), # type: ignore[arg-type] # Unintended usage of xfail + os.environ.get('PYTHONDONTWRITEBYTECODE', False), reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index ef8536bf4e..ba019dc79d 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -79,7 +79,7 @@ def run(): @staticmethod def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]: version_str = pkg_info_lines[0].split(' ')[1] - major, minor, *_ = map(int, version_str.split('.')) + major, minor = map(int, version_str.split('.')[:2]) return major, minor def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env): From 379a04119bf49c11bedd1887458e607808bffca3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 21 Feb 2024 23:39:59 -0500 Subject: [PATCH 18/25] Comments updates and don't touch vendored --- mypy.ini | 17 ++++++++++++----- setuptools/_vendor/packaging/_manylinux.py | 2 +- setuptools/command/editable_wheel.py | 3 ++- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/mypy.ini b/mypy.ini index 7df314a5ae..e0fa8e5c47 100644 --- a/mypy.ini +++ b/mypy.ini @@ -16,20 +16,27 @@ exclude = (?x)( disable_error_code = # TODO: Test environment is not yet properly configured to install all imported packages # import-not-found, # This can be left commented out for local runs until we enforce running mypy in the CI - # TODO: Not all dependencies are typed. 
Nanely: distutils._modified, wheel.wheelfile, and jaraco.* + # TODO: Not all dependencies are typed. Namely: distutils._modified, wheel.wheelfile, and jaraco.* import-untyped, # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, # w/o updating all the attributes and return types from the base classes for type-checkers to understand # Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command # *.extern modules that actually live in *._vendor will also cause attr-defined issues on import - attr-defined + attr-defined, # Avoid raising issues when importing from "extern" modules, as those are added to path dynamically. # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 [mypy-pkg_resources.extern.*,setuptools.extern.*] ignore_missing_imports = True -# Tests include creating dynamic modules that won't exists statically before the test is run. -# Let's ignore all "import-not-found", as if an import really wasn't found, then the test would fail. [mypy-pkg_resources.tests.*,setuptools.tests.*] -disable_error_code = import-not-found, import-untyped +disable_error_code = + # Tests include creating dynamic modules that won't exist statically before the test is run. + # Let's ignore all "import-not-found", as if an import really wasn't found, then the test would fail. import-not-found, + # many untyped "jaraco" modules + import-untyped, + # Mypy issue, this vendored module is already excluded! +[mypy-setuptools._vendor.packaging._manylinux] disable_error_code = import-not-found diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py index cd868ddcea..638566d5a3 100644 --- a/setuptools/_vendor/packaging/_manylinux.py +++ b/setuptools/_vendor/packaging/_manylinux.py @@ -173,7 +173,7 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: return False # Check for presence of _manylinux module. 
try: - import _manylinux # type: ignore[import-not-found] # noqa # Expected could be missing + import _manylinux except ImportError: return True if hasattr(_manylinux, "manylinux_compatible"): diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 894acd4843..37225d87d9 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -247,7 +247,8 @@ def _configure_build( def _set_editable_mode(self): """Set the ``editable_mode`` flag in the build sub-commands""" dist = self.distribution - build = cast(build_cls, dist.get_command_obj("build")) + build = dist.get_command_obj("build") + # TODO: Update typeshed distutils stubs to overload non-None return type by default for cmd_name in build.get_sub_commands(): cmd = dist.get_command_obj(cmd_name) if hasattr(cmd, "editable_mode"): From 555e9ffed61886ab5578025320b4b50641014dbd Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 00:07:47 -0500 Subject: [PATCH 19/25] Accidentally removed noqa --- setuptools/_vendor/packaging/_manylinux.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py index 638566d5a3..449c655be6 100644 --- a/setuptools/_vendor/packaging/_manylinux.py +++ b/setuptools/_vendor/packaging/_manylinux.py @@ -173,7 +173,7 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: return False # Check for presence of _manylinux module. 
try: - import _manylinux + import _manylinux # noqa except ImportError: return True if hasattr(_manylinux, "manylinux_compatible"): From 8aa568a188089f83151c99de58374da3b08c2675 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 09:52:56 -0500 Subject: [PATCH 20/25] Update setuptools/tests/integration/test_pip_install_sdist.py Co-authored-by: Anderson Bravalheri --- setuptools/tests/integration/test_pip_install_sdist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index f5db400609..baaff8a9ce 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -30,7 +30,7 @@ pytestmark = pytest.mark.integration -(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/8009#issuecomment-558335186 +(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936 """Default version to be checked""" # There are positive and negative aspects of checking the latest version of the # packages. 
From b3fde45b875de2aafa08c0dbf2ad0506cc2bd35d Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 10:05:50 -0500 Subject: [PATCH 21/25] Post merge comments Update setuptools/tests/integration/test_pip_install_sdist.py Co-authored-by: Anderson Bravalheri --- setuptools/command/easy_install.py | 6 +----- setuptools/tests/integration/test_pip_install_sdist.py | 4 +--- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index d0ce62adc2..402355bd81 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -25,7 +25,7 @@ from distutils.command import install import sys import os -from typing import TYPE_CHECKING, Dict, List, Union +from typing import Dict, List import zipimport import shutil import tempfile @@ -78,10 +78,6 @@ from .._path import ensure_directory from ..extern.jaraco.text import yield_lines -if TYPE_CHECKING: - # Same as _typeshed.FileDescriptorOrPath - _FileDescriptorOrPath = Union[int, str, bytes, os.PathLike[str], os.PathLike[bytes]] - # Turn on PEP440Warnings warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index f5db400609..3800698f65 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -1,5 +1,3 @@ -# https://github.com/python/mypy/issues/8009#issuecomment-558335186 -# mypy: disable-error-code="has-type" """Integration tests for setuptools that focus on building packages via pip. 
The idea behind these tests is not to exhaustively check all the possible @@ -30,7 +28,7 @@ pytestmark = pytest.mark.integration -(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/8009#issuecomment-558335186 +(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936 """Default version to be checked""" # There are positive and negative aspects of checking the latest version of the # packages. From 55eeabd46fb7f1d4d6796a66fa29150b3aa47153 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 20:36:10 -0500 Subject: [PATCH 22/25] Document that usage of _config_vars is very purposeful Closes #4228 + try to resolve doc issue --- docs/conf.py | 3 ++- setuptools/command/build_ext.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0a82ff2fe2..3c6995a1be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -168,16 +168,17 @@ ('py:attr', 'CCompiler.language_map'), # undocumented ('py:attr', 'CCompiler.language_order'), # undocumented ('py:class', 'distutils.dist.Distribution'), # undocumented + ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented ('py:class', 'distutils.extension.Extension'), # undocumented ('py:class', 'BorlandCCompiler'), # undocumented ('py:class', 'CCompiler'), # undocumented ('py:class', 'CygwinCCompiler'), # undocumented - ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented ('py:class', 'FileList'), # undocumented ('py:class', 'IShellLink'), # ref to MS docs ('py:class', 'MSVCCompiler'), # undocumented ('py:class', 'OptionDummy'), # undocumented ('py:class', 'UnixCCompiler'), # undocumented + ('py:class', 'setuptools.dist.Distribution'), # undocumented ('py:exc', 'CompileError'), # undocumented ('py:exc', 'DistutilsExecError'), # undocumented ('py:exc', 'DistutilsFileError'), # undocumented diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 
982536c6a8..1301433a32 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -26,7 +26,9 @@ # make sure _config_vars is initialized get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa # Not publicly exposed in typeshed distutils stubs +# Not publicly exposed in typeshed distutils stubs, but this is done on purpose +# See https://github.com/pypa/setuptools/pull/4228#issuecomment-1959856400 +from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa def _customize_compiler_for_shlib(compiler): From e9d79f0e5c3b2959ff44eac573960cf137fa7eab Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 20:43:19 -0500 Subject: [PATCH 23/25] sort nitpick_ignore --- docs/conf.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 3c6995a1be..80e1f0758e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -161,24 +161,24 @@ # Ref: https://stackoverflow.com/a/30624034/595220 nitpick_ignore = [ ('c:func', 'SHGetSpecialFolderPath'), # ref to MS docs + ('envvar', 'DIST_EXTRA_CONFIG'), # undocumented ('envvar', 'DISTUTILS_DEBUG'), # undocumented ('envvar', 'HOME'), # undocumented ('envvar', 'PLAT'), # undocumented - ('envvar', 'DIST_EXTRA_CONFIG'), # undocumented ('py:attr', 'CCompiler.language_map'), # undocumented ('py:attr', 'CCompiler.language_order'), # undocumented - ('py:class', 'distutils.dist.Distribution'), # undocumented - ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented - ('py:class', 'distutils.extension.Extension'), # undocumented ('py:class', 'BorlandCCompiler'), # undocumented ('py:class', 'CCompiler'), # undocumented ('py:class', 'CygwinCCompiler'), # undocumented + ('py:class', 'distutils.dist.Distribution'), # undocumented + ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented + ('py:class', 'distutils.extension.Extension'), # undocumented ('py:class', 
'FileList'), # undocumented ('py:class', 'IShellLink'), # ref to MS docs ('py:class', 'MSVCCompiler'), # undocumented ('py:class', 'OptionDummy'), # undocumented - ('py:class', 'UnixCCompiler'), # undocumented ('py:class', 'setuptools.dist.Distribution'), # undocumented + ('py:class', 'UnixCCompiler'), # undocumented ('py:exc', 'CompileError'), # undocumented ('py:exc', 'DistutilsExecError'), # undocumented ('py:exc', 'DistutilsFileError'), # undocumented From 232bcd4d54ac689738b75c3b68a07d3986f10a8f Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 20:45:09 -0500 Subject: [PATCH 24/25] Make only comment on newline like others --- docs/conf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 80e1f0758e..be8856849b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -187,8 +187,7 @@ ('py:exc', 'PreprocessError'), # undocumented ('py:exc', 'setuptools.errors.PlatformError'), # sphinx cannot find it ('py:func', 'distutils.CCompiler.new_compiler'), # undocumented - # undocumented: - ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'), + ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'), # undocumented ('py:func', 'distutils.file_util._copy_file_contents'), # undocumented ('py:func', 'distutils.log.debug'), # undocumented ('py:func', 'distutils.spawn.find_executable'), # undocumented From 708fff7bf71cfc66396d62aae651282ff6bbb45c Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 25 Feb 2024 14:21:55 -0500 Subject: [PATCH 25/25] Forgot to re-ignore --- setuptools/tests/integration/test_pip_install_sdist.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index 3800698f65..17bf2af9d2 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -1,3 +1,5 @@ +# https://github.com/python/mypy/issues/16936 +# 
mypy: disable-error-code="has-type" """Integration tests for setuptools that focus on building packages via pip. The idea behind these tests is not to exhaustively check all the possible