From 9def3c056c94f24e628c405a08b87bfb8d86be65 Mon Sep 17 00:00:00 2001 From: Kyle Benesch <4b796c65+github@gmail.com> Date: Sun, 28 Apr 2024 20:47:52 -0700 Subject: [PATCH 1/5] Drop Python 3.7 and 3.8 3.7 is EOL and 3.8 is nearly EOL --- .github/workflows/python-package.yml | 4 ---- .mypy.ini | 2 +- pyproject.toml | 2 +- 3 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 05dfed8b..f1c8432f 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -140,10 +140,6 @@ jobs: - os: "macos-13" xcode: "14.2" python: "3.x" - - os: "ubuntu-latest" - python: "3.7" - - os: "ubuntu-latest" - python: "3.8" - os: "ubuntu-latest" python: "3.9" - os: "ubuntu-latest" diff --git a/.mypy.ini b/.mypy.ini index a0fb419d..a1451945 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.7 +python_version = 3.9 [mypy-wheel.*] ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml index 2dc95453..ab888191 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ description = "Move macOS dynamic libraries into package" authors = [{ name = "Matthew Brett", email = "matthew.brett@gmail.com" }] maintainers = [{ name = "Matthew Brett", email = "matthew.brett@gmail.com" }] readme = "README.rst" -requires-python = ">=3.7" +requires-python = ">=3.9" license = { file = "LICENSE" } dependencies = [ "bindepend; sys_platform == 'win32'", From 4f379b659fb738710affa30ea4915fb0883fef01 Mon Sep 17 00:00:00 2001 From: Kyle Benesch <4b796c65+github@gmail.com> Date: Thu, 29 Aug 2024 14:56:13 -0700 Subject: [PATCH 2/5] Move Mypy config to pyproject.toml --- .mypy.ini | 14 -------------- pyproject.toml | 11 +++++++++++ 2 files changed, 11 insertions(+), 14 deletions(-) delete mode 100644 .mypy.ini diff --git a/.mypy.ini b/.mypy.ini deleted file mode 100644 index a1451945..00000000 --- a/.mypy.ini +++ /dev/null @@ -1,14 +0,0 @@ -[mypy] -python_version = 3.9 - -[mypy-wheel.*] -ignore_missing_imports = True - -[mypy-pytest] -# Skip incompatible sub-modules. -follow_imports = skip - -[mypy-delocate._version] -# Ignore delocate._version module. -ignore_errors = True -ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml index ab888191..bd7f6d5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,17 @@ testpaths = ["delocate/"] addopts = ["--doctest-modules", "--cov=delocate", "--cov-config=.coveragerc"] log_file_level = "DEBUG" + +[tool.mypy] +files = ["*.py", "delocate"] +python_version = "3.9" + +[[tool.mypy.overrides]] # Ignore delocate._version module. 
+module = "delocate._version" +ignore_errors = true +ignore_missing_imports = true + + [tool.ruff] line-length = 80 From 07a98318f2eac991babdb45c6c3f0af718f73933 Mon Sep 17 00:00:00 2001 From: Kyle Benesch <4b796c65+github@gmail.com> Date: Thu, 29 Aug 2024 15:27:56 -0700 Subject: [PATCH 3/5] Apply and enforce Ruff pyupgrade and future-annotations rules --- delocate/cmd/common.py | 5 +- delocate/cmd/delocate_addplat.py | 15 ++-- delocate/cmd/delocate_listdeps.py | 1 - delocate/cmd/delocate_merge.py | 2 +- delocate/cmd/delocate_patch.py | 5 +- delocate/cmd/delocate_path.py | 1 - delocate/cmd/delocate_wheel.py | 5 +- delocate/delocating.py | 130 ++++++++++++--------------- delocate/libsana.py | 106 ++++++++++------------ delocate/pkginfo.py | 9 +- delocate/tests/conftest.py | 2 +- delocate/tests/env_tools.py | 2 +- delocate/tests/test_delocating.py | 23 +++-- delocate/tests/test_fuse.py | 4 +- delocate/tests/test_install_names.py | 17 ++-- delocate/tests/test_libsana.py | 10 +-- delocate/tests/test_scripts.py | 5 +- delocate/tests/test_tmpdirs.py | 2 - delocate/tests/test_tools.py | 12 ++- delocate/tests/test_wheelies.py | 8 +- delocate/tests/test_wheeltools.py | 6 +- delocate/tmpdirs.py | 10 +-- delocate/tools.py | 78 +++++++--------- delocate/wheeltools.py | 25 +++--- pyproject.toml | 12 +-- 25 files changed, 221 insertions(+), 274 deletions(-) diff --git a/delocate/cmd/common.py b/delocate/cmd/common.py index 9811f559..e219d53d 100644 --- a/delocate/cmd/common.py +++ b/delocate/cmd/common.py @@ -10,8 +10,9 @@ import os import sys from argparse import ArgumentParser, Namespace +from collections.abc import Iterable, Iterator from pathlib import Path -from typing import Callable, Iterable, Iterator, List +from typing import Callable from typing_extensions import Literal, TypedDict @@ -102,7 +103,7 @@ class DelocateArgs(TypedDict): def delocate_values(args: Namespace) -> DelocateArgs: """Return the common kwargs for delocate_path and delocate_wheel.""" - exclude_files: List[str] = args.exclude + exclude_files: list[str] = args.exclude def copy_filter_exclude(name: str) -> bool: """Return False if name is excluded, uses normal rules otherwise.""" diff --git a/delocate/cmd/delocate_addplat.py b/delocate/cmd/delocate_addplat.py index 1119eee4..ccbeb455 100755 --- a/delocate/cmd/delocate_addplat.py +++ b/delocate/cmd/delocate_addplat.py @@ -15,7 +15,6 @@ """ # vim: ft=python -from __future__ import absolute_import, division, print_function import os from argparse import ArgumentParser @@ -107,15 +106,15 @@ def main() -> None: # noqa: D103 if args.osx_ver is not None: for ver in args.osx_ver: plat_tags += [ - "macosx_{0}_{1}".format(ver, args.dual_arch_type), - "macosx_{0}_x86_64".format(ver), + f"macosx_{ver}_{args.dual_arch_type}", + f"macosx_{ver}_x86_64", ] if len(plat_tags) == 0: raise RuntimeError("Need at least one --osx-ver or --plat-tag") for wheel in wheels: if multi or args.verbose: print( - "Setting platform tags {0} for wheel {1}".format( + "Setting platform tags {} for wheel {}".format( ",".join(plat_tags), wheel ) ) @@ -125,18 +124,16 @@ def main() -> None: # noqa: D103 ) except WheelToolsError as e: if args.skip_errors: - print("Cannot modify {0} because {1}".format(wheel, e)) + print(f"Cannot modify {wheel} because {e}") continue raise if args.verbose: if fname is None: print( - "{0} already has tags {1}".format( - wheel, ", ".join(plat_tags) - ) + "{} already has tags {}".format(wheel, ", ".join(plat_tags)) ) else: - print("Wrote {0}".format(fname)) + print(f"Wrote {fname}") if ( 
args.rm_orig and fname is not None diff --git a/delocate/cmd/delocate_listdeps.py b/delocate/cmd/delocate_listdeps.py index d3c19cf7..6627c7fd 100755 --- a/delocate/cmd/delocate_listdeps.py +++ b/delocate/cmd/delocate_listdeps.py @@ -2,7 +2,6 @@ """List library dependencies for libraries in path or wheel.""" # vim: ft=python -from __future__ import absolute_import, division, print_function from argparse import ArgumentParser from os import getcwd diff --git a/delocate/cmd/delocate_merge.py b/delocate/cmd/delocate_merge.py index fdf096f1..3a3b3e60 100755 --- a/delocate/cmd/delocate_merge.py +++ b/delocate/cmd/delocate_merge.py @@ -31,7 +31,7 @@ def main() -> None: # noqa: D103 args = parser.parse_args() verbosity_config(args) - wheel1, wheel2 = [Path(wheel).resolve(strict=True) for wheel in args.wheels] + wheel1, wheel2 = (Path(wheel).resolve(strict=True) for wheel in args.wheels) out_wheel = Path( args.wheel_dir if args.wheel_dir is not None else wheel1.parent ).resolve() diff --git a/delocate/cmd/delocate_patch.py b/delocate/cmd/delocate_patch.py index 92b5d1fe..b1c9e455 100755 --- a/delocate/cmd/delocate_patch.py +++ b/delocate/cmd/delocate_patch.py @@ -5,7 +5,6 @@ """ # vim: ft=python -from __future__ import absolute_import, division, print_function import os from argparse import ArgumentParser @@ -41,14 +40,14 @@ def main() -> None: # noqa: D103 else: wheel_dir = None if args.verbose: - print("Patching: {0} with {1}".format(args.wheel, args.patch_fname)) + print(f"Patching: {args.wheel} with {args.patch_fname}") if wheel_dir: out_wheel = pjoin(wheel_dir, basename(args.wheel)) else: out_wheel = args.wheel patch_wheel(args.wheel, args.patch_fname, out_wheel) if args.verbose: - print("Patched wheel {0} to {1}:".format(args.wheel, out_wheel)) + print(f"Patched wheel {args.wheel} to {out_wheel}:") if __name__ == "__main__": diff --git a/delocate/cmd/delocate_path.py b/delocate/cmd/delocate_path.py index 014b896c..ef07f266 100755 --- a/delocate/cmd/delocate_path.py +++ b/delocate/cmd/delocate_path.py @@ -2,7 +2,6 @@ """Copy, relink library dependencies for libraries in path.""" # vim: ft=python -from __future__ import absolute_import, division, print_function import os from argparse import ArgumentParser diff --git a/delocate/cmd/delocate_wheel.py b/delocate/cmd/delocate_wheel.py index a564917b..b4ca3cf4 100755 --- a/delocate/cmd/delocate_wheel.py +++ b/delocate/cmd/delocate_wheel.py @@ -14,7 +14,6 @@ from argparse import ArgumentParser from os.path import basename, exists, expanduser from os.path import join as pjoin -from typing import List, Optional, Text from packaging.version import Version @@ -89,7 +88,7 @@ def main() -> None: # noqa: D103 os.makedirs(wheel_dir) else: wheel_dir = None - require_archs: Optional[List[Text]] = None + require_archs: list[str] | None = None if args.require_archs is None: require_archs = [] if args.check_archs else None elif "," in args.require_archs: @@ -122,7 +121,7 @@ def main() -> None: # noqa: D103 **delocate_values(args), ) if args.verbose and len(copied): - print("Copied to package {0} directory:".format(args.lib_sdir)) + print(f"Copied to package {args.lib_sdir} directory:") copy_lines = [" " + name for name in sorted(copied)] print("\n".join(copy_lines)) diff --git a/delocate/delocating.py b/delocate/delocating.py index 131386cf..ca1e652a 100644 --- a/delocate/delocating.py +++ b/delocate/delocating.py @@ -9,23 +9,14 @@ import shutil import stat import warnings +from collections.abc import Iterable, Iterator, Mapping from os.path import abspath, 
basename, dirname, exists, realpath, relpath from os.path import join as pjoin from pathlib import Path from subprocess import PIPE, Popen from typing import ( Callable, - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Mapping, - Optional, - Set, - Text, - Tuple, - Union, + Final, ) from macholib.mach_o import ( # type: ignore[import-untyped] @@ -36,7 +27,6 @@ from macholib.MachO import MachO # type: ignore[import-untyped] from packaging.utils import parse_wheel_filename from packaging.version import Version -from typing_extensions import Final from .libsana import ( DelocationError, @@ -70,12 +60,12 @@ def delocate_tree_libs( - lib_dict: Mapping[Text, Mapping[Text, Text]], - lib_path: Text, - root_path: Text, + lib_dict: Mapping[str, Mapping[str, str]], + lib_path: str, + root_path: str, *, sanitize_rpaths: bool = False, -) -> Dict[Text, Dict[Text, Text]]: +) -> dict[str, dict[str, str]]: """Move needed libraries in `lib_dict` into `lib_path`. `lib_dict` has keys naming libraries required by the files in the @@ -137,8 +127,8 @@ def delocate_tree_libs( def _sanitize_rpaths( - lib_dict: Mapping[Text, Mapping[Text, Text]], - files_to_delocate: Iterable[Text], + lib_dict: Mapping[str, Mapping[str, str]], + files_to_delocate: Iterable[str], ) -> None: """Sanitize the rpaths of libraries.""" for required in files_to_delocate: @@ -148,9 +138,9 @@ def _sanitize_rpaths( def _analyze_tree_libs( - lib_dict: Mapping[Text, Mapping[Text, Text]], - root_path: Text, -) -> Tuple[Dict[Text, Dict[Text, Text]], Set[Text]]: + lib_dict: Mapping[str, Mapping[str, str]], + root_path: str, +) -> tuple[dict[str, dict[str, str]], set[str]]: """Verify then return which library files to copy and delocate. Returns @@ -168,7 +158,7 @@ def _analyze_tree_libs( for required, requirings in lib_dict.items(): if required.startswith("@"): # @rpath, etc, at this point should never happen. - raise DelocationError("%s was expected to be resolved." % required) + raise DelocationError(f"{required} was expected to be resolved.") r_ed_base = basename(required) if relpath(required, rp_root_path).startswith(".."): # Not local, plan to copy @@ -178,9 +168,7 @@ def _analyze_tree_libs( + r_ed_base ) if not exists(required): - raise DelocationError( - 'library "{0}" does not exist'.format(required) - ) + raise DelocationError(f'library "{required}" does not exist') # Copy requirings to preserve it since it will be modified later. needs_copying[required] = dict(requirings) copied_basenames.add(r_ed_base) @@ -190,11 +178,11 @@ def _analyze_tree_libs( def _copy_required_libs( - lib_dict: Mapping[Text, Mapping[Text, Text]], - lib_path: Text, - root_path: Text, - libraries_to_copy: Iterable[Text], -) -> Tuple[Dict[Text, Dict[Text, Text]], Set[Text]]: + lib_dict: Mapping[str, Mapping[str, str]], + lib_path: str, + root_path: str, + libraries_to_copy: Iterable[str], +) -> tuple[dict[str, dict[str, str]], set[str]]: """Copy libraries outside of root_path to lib_path. 
Returns @@ -234,9 +222,9 @@ def _copy_required_libs( def _update_install_names( - lib_dict: Mapping[Text, Mapping[Text, Text]], - root_path: Text, - files_to_delocate: Iterable[Text], + lib_dict: Mapping[str, Mapping[str, str]], + root_path: str, + files_to_delocate: Iterable[str], ) -> None: """Update the install names of libraries.""" for required in files_to_delocate: @@ -262,10 +250,10 @@ def _update_install_names( def copy_recurse( - lib_path: Text, - copy_filt_func: Optional[Callable[[Text], bool]] = None, - copied_libs: Optional[Dict[Text, Dict[Text, Text]]] = None, -) -> Dict[Text, Dict[Text, Text]]: + lib_path: str, + copy_filt_func: Callable[[str], bool] | None = None, + copied_libs: dict[str, dict[str, str]] | None = None, +) -> dict[str, dict[str, str]]: """Analyze `lib_path` for library dependencies and copy libraries. `lib_path` is a directory containing libraries. The libraries might @@ -321,9 +309,9 @@ def copy_recurse( def _copy_required( - lib_path: Text, - copy_filt_func: Optional[Callable[[Text], bool]], - copied_libs: Dict[Text, Dict[Text, Text]], + lib_path: str, + copy_filt_func: Callable[[str], bool] | None, + copied_libs: dict[str, dict[str, str]], ) -> None: """Copy libraries required for files in `lib_path` to `copied_libs`. @@ -438,15 +426,15 @@ def _delocate_filter_function( def delocate_path( - tree_path: Text, - lib_path: Text, - lib_filt_func: Optional[Union[str, Callable[[Text], bool]]] = None, - copy_filt_func: Optional[Callable[[Text], bool]] = filter_system_libs, - executable_path: Optional[Text] = None, + tree_path: str, + lib_path: str, + lib_filt_func: str | Callable[[str], bool] | None = None, + copy_filt_func: Callable[[str], bool] | None = filter_system_libs, + executable_path: str | None = None, ignore_missing: bool = False, *, sanitize_rpaths: bool = False, -) -> Dict[Text, Dict[Text, Text]]: +) -> dict[str, dict[str, str]]: """Copy required libraries for files in `tree_path` into `lib_path`. Parameters @@ -522,8 +510,8 @@ def delocate_path( def _copy_lib_dict( - lib_dict: Mapping[Text, Mapping[Text, Text]], -) -> Dict[Text, Dict[Text, Text]]: + lib_dict: Mapping[str, Mapping[str, str]], +) -> dict[str, dict[str, str]]: """Return a copy of lib_dict.""" return { # Convert nested Mapping types into nested Dict types. required: dict(requiring) for required, requiring in lib_dict.items() @@ -667,8 +655,8 @@ def _get_archs_and_version_from_wheel_name( def _get_incompatible_libs( - required_version: Optional[Version], - version_lib_dict: Dict[Version, List[Path]], + required_version: Version | None, + version_lib_dict: dict[Version, list[Path]], arch: str, ) -> set[tuple[Path, Version]]: """Find libraries which require a more modern macOS version. @@ -777,7 +765,7 @@ def _pack_architectures( def _calculate_minimum_wheel_name( wheel_name: str, wheel_dir: Path, - require_target_macos_version: Optional[Version], + require_target_macos_version: Version | None, ) -> tuple[str, set[tuple[Path, Version]]]: """Return a wheel name with an updated platform tag. 
@@ -810,7 +798,7 @@ def _calculate_minimum_wheel_name( ) # get the architecture and minimum macOS version from the libraries # in the wheel - all_library_versions: Dict[str, Dict[Version, List[Path]]] = {} + all_library_versions: dict[str, dict[Version, list[Path]]] = {} for lib in wheel_dir.glob("**/*"): for arch, version in _get_macos_min_version(lib): @@ -892,7 +880,7 @@ def get_macos_platform_tag(version: Version, architecture: str) -> str: def _check_and_update_wheel_name( wheel_path: Path, wheel_dir: Path, - require_target_macos_version: Optional[Version], + require_target_macos_version: Version | None, ) -> Path: """Determine the minimum platform tag and update the wheel name if needed. @@ -958,18 +946,18 @@ def _update_wheelfile(wheel_dir: Path, wheel_name: str) -> None: def delocate_wheel( in_wheel: str, - out_wheel: Optional[str] = None, + out_wheel: str | None = None, lib_sdir: str = ".dylibs", - lib_filt_func: Union[None, str, Callable[[str], bool]] = None, - copy_filt_func: Optional[Callable[[str], bool]] = filter_system_libs, - require_archs: Union[None, str, Iterable[str]] = None, - check_verbose: Optional[bool] = None, + lib_filt_func: Callable[[str], bool] | str | None = None, + copy_filt_func: Callable[[str], bool] | None = filter_system_libs, + require_archs: Iterable[str] | str | None = None, + check_verbose: bool | None = None, *, - executable_path: Optional[str] = None, + executable_path: str | None = None, ignore_missing: bool = False, sanitize_rpaths: bool = False, - require_target_macos_version: Optional[Version] = None, -) -> Dict[str, Dict[str, str]]: + require_target_macos_version: Version | None = None, +) -> dict[str, dict[str, str]]: """Update wheel by copying required libraries to `lib_sdir` in wheel. Create `lib_sdir` in wheel tree only if we are copying one or more @@ -1104,7 +1092,7 @@ def delocate_wheel( def patch_wheel( - in_wheel: Text, patch_fname: Text, out_wheel: Optional[Text] = None + in_wheel: str, patch_fname: str, out_wheel: str | None = None ) -> None: """Apply ``-p1`` style patch in `patch_fname` to contents of `in_wheel`. @@ -1128,7 +1116,7 @@ def patch_wheel( else: out_wheel = abspath(out_wheel) if not exists(patch_fname): - raise ValueError("patch file {0} does not exist".format(patch_fname)) + raise ValueError(f"patch file {patch_fname} does not exist") with InWheel(in_wheel, out_wheel): with open(patch_fname, "rb") as fobj: patch_proc = Popen( @@ -1145,12 +1133,10 @@ def patch_wheel( def check_archs( - copied_libs: Mapping[Text, Mapping[Text, Text]], - require_archs: Union[Text, Iterable[Text]] = (), + copied_libs: Mapping[str, Mapping[str, str]], + require_archs: str | Iterable[str] = (), stop_fast: bool = False, -) -> Set[ - Union[Tuple[Text, FrozenSet[Text]], Tuple[Text, Text, FrozenSet[Text]]] -]: +) -> set[tuple[str, frozenset[str]] | tuple[str, str, frozenset[str]]]: """Check compatibility of archs in `copied_libs` dict. 
Parameters @@ -1193,8 +1179,8 @@ def check_archs( if isinstance(require_archs, str): require_archs = _ARCH_LOOKUP.get(require_archs, [require_archs]) require_archs_set = frozenset(require_archs) - bads: List[ - Union[Tuple[Text, FrozenSet[Text]], Tuple[Text, Text, FrozenSet[Text]]] + bads: list[ + tuple[str, frozenset[str]] | tuple[str, str, frozenset[str]] ] = [] for depended_lib, dep_dict in copied_libs.items(): depended_archs = get_archs(depended_lib) @@ -1247,7 +1233,7 @@ def bads_report(bads, path_prefix=None): if len(result) == 3: depended_lib, depending_lib, missing_archs = result reports.append( - "{0} needs {1} {2} missing from {3}".format( + "{} needs {} {} missing from {}".format( path_processor(depending_lib), "archs" if len(missing_archs) > 1 else "arch", ", ".join(sorted(missing_archs)), @@ -1257,7 +1243,7 @@ def bads_report(bads, path_prefix=None): elif len(result) == 2: depending_lib, missing_archs = result reports.append( - "Required {0} {1} missing from {2}".format( + "Required {} {} missing from {}".format( "archs" if len(missing_archs) > 1 else "arch", ", ".join(sorted(missing_archs)), path_processor(depending_lib), diff --git a/delocate/libsana.py b/delocate/libsana.py index 0b91fadc..dab597ea 100644 --- a/delocate/libsana.py +++ b/delocate/libsana.py @@ -3,22 +3,17 @@ Analyze library dependencies in paths and wheel files. """ +from __future__ import annotations + import logging import os import sys import warnings +from collections.abc import Iterable, Iterator from os.path import basename, dirname, realpath from os.path import join as pjoin from typing import ( Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Set, - Text, - Tuple, ) from .tmpdirs import TemporaryDirectory @@ -40,15 +35,15 @@ class DependencyNotFound(Exception): """Raised by tree_libs or resolve_rpath if an expected dependency is missing.""" # noqa: E501 -def _filter_system_libs(libname: Text) -> bool: +def _filter_system_libs(libname: str) -> bool: return not (libname.startswith("/usr/lib") or libname.startswith("/System")) def get_dependencies( - lib_fname: Text, - executable_path: Optional[Text] = None, + lib_fname: str, + executable_path: str | None = None, filt_func: Callable[[str], bool] = lambda filepath: True, -) -> Iterator[Tuple[Optional[Text], Text]]: +) -> Iterator[tuple[str | None, str]]: """Find and yield the real paths of dependencies of the library `lib_fname`. This function is used to search for the real files that are required by @@ -88,7 +83,7 @@ def get_dependencies( When `lib_fname` does not exist. 
""" if not filt_func(lib_fname): - logger.debug("Ignoring dependencies of %s" % lib_fname) + logger.debug(f"Ignoring dependencies of {lib_fname}") return if not os.path.isfile(lib_fname): if not _filter_system_libs(lib_fname): @@ -125,10 +120,7 @@ def get_dependencies( ) yield dependency_path, install_name except DependencyNotFound: - message = "\n%s not found:\n Needed by: %s" % ( - install_name, - lib_fname, - ) + message = f"\n{install_name} not found:\n Needed by: {lib_fname}" if install_name.startswith("@rpath"): message += "\n Search path:\n " + "\n ".join(rpaths) logger.error(message) @@ -137,11 +129,11 @@ def get_dependencies( def walk_library( - lib_fname: Text, - filt_func: Callable[[Text], bool] = lambda filepath: True, - visited: Optional[Set[Text]] = None, - executable_path: Optional[Text] = None, -) -> Iterator[Text]: + lib_fname: str, + filt_func: Callable[[str], bool] = lambda filepath: True, + visited: set[str] | None = None, + executable_path: str | None = None, +) -> Iterator[str]: """ Yield all libraries on which `lib_fname` depends, directly or indirectly. @@ -193,20 +185,19 @@ def walk_library( lib_fname, ) continue - for sub_dependency in walk_library( + yield from walk_library( dependency_fname, filt_func=filt_func, visited=visited, executable_path=executable_path, - ): - yield sub_dependency + ) def walk_directory( - root_path: Text, - filt_func: Callable[[Text], bool] = lambda filepath: True, - executable_path: Optional[Text] = None, -) -> Iterator[Text]: + root_path: str, + filt_func: Callable[[str], bool] = lambda filepath: True, + executable_path: str | None = None, +) -> Iterator[str]: """Walk along dependencies starting with the libraries within `root_path`. Dependencies which can not be resolved will be logged and ignored. @@ -231,7 +222,7 @@ def walk_directory( Iterates over the libraries in `root_path` and each of their dependencies without any duplicates. """ - visited_paths: Set[Text] = set() + visited_paths: set[str] = set() for dirpath, dirnames, basenames in os.walk(root_path): for base in basenames: depending_path = realpath(pjoin(dirpath, base)) @@ -239,13 +230,12 @@ def walk_directory( continue # A library in root_path was a dependency of another. if not filt_func(depending_path): continue - for library_path in walk_library( + yield from walk_library( depending_path, filt_func=filt_func, visited=visited_paths, executable_path=executable_path, - ): - yield library_path + ) def _tree_libs_from_libraries( @@ -253,9 +243,9 @@ def _tree_libs_from_libraries( *, lib_filt_func: Callable[[str], bool], copy_filt_func: Callable[[str], bool], - executable_path: Optional[str] = None, + executable_path: str | None = None, ignore_missing: bool = False, -) -> Dict[str, Dict[str, str]]: +) -> dict[str, dict[str, str]]: """Return an analysis of the dependencies of `libraries`. Parameters @@ -298,7 +288,7 @@ def _tree_libs_from_libraries( When any dependencies can not be located and ``ignore_missing`` is False. 
""" - lib_dict: Dict[str, Dict[str, str]] = {} + lib_dict: dict[str, dict[str, str]] = {} missing_libs = False for library_path in libraries: for depending_path, install_name in get_dependencies( @@ -327,9 +317,9 @@ def tree_libs_from_directory( *, lib_filt_func: Callable[[str], bool] = _filter_system_libs, copy_filt_func: Callable[[str], bool] = lambda path: True, - executable_path: Optional[str] = None, + executable_path: str | None = None, ignore_missing: bool = False, -) -> Dict[Text, Dict[Text, Text]]: +) -> dict[str, dict[str, str]]: """Return an analysis of the libraries in the directory of `start_path`. Parameters @@ -390,9 +380,9 @@ def _allow_all(path: str) -> bool: def tree_libs( - start_path: Text, - filt_func: Optional[Callable[[Text], bool]] = None, -) -> Dict[Text, Dict[Text, Text]]: + start_path: str, + filt_func: Callable[[str], bool] | None = None, +) -> dict[str, dict[str, str]]: """Return analysis of library dependencies within `start_path`. Parameters @@ -440,7 +430,7 @@ def tree_libs( ) if filt_func is None: filt_func = _allow_all - lib_dict: Dict[Text, Dict[Text, Text]] = {} + lib_dict: dict[str, dict[str, str]] = {} for dirpath, dirnames, basenames in os.walk(start_path): for base in basenames: depending_path = realpath(pjoin(dirpath, base)) @@ -469,11 +459,11 @@ def tree_libs( def resolve_dynamic_paths( - lib_path: Text, - rpaths: Iterable[Text], - loader_path: Text, - executable_path: Optional[Text] = None, -) -> Text: + lib_path: str, + rpaths: Iterable[str], + loader_path: str, + executable_path: str | None = None, +) -> str: """Return `lib_path` with any special runtime linking names resolved. If `lib_path` has `@rpath` then returns the first `rpaths`/`lib_path` @@ -541,7 +531,7 @@ def resolve_dynamic_paths( raise DependencyNotFound(lib_path) -def resolve_rpath(lib_path: Text, rpaths: Iterable[Text]) -> Text: +def resolve_rpath(lib_path: str, rpaths: Iterable[str]) -> str: """Return `lib_path` with its `@rpath` resolved. If the `lib_path` doesn't have `@rpath` then it's returned as is. @@ -581,7 +571,7 @@ def resolve_rpath(lib_path: Text, rpaths: Iterable[Text]) -> Text: return rpath_lib warnings.warn( - "Couldn't find {0} on paths:\n\t{1}".format( + "Couldn't find {} on paths:\n\t{}".format( lib_path, "\n\t".join(realpath(path) for path in rpaths), ) @@ -589,7 +579,7 @@ def resolve_rpath(lib_path: Text, rpaths: Iterable[Text]) -> Text: return lib_path -def search_environment_for_lib(lib_path: Text) -> Text: +def search_environment_for_lib(lib_path: str) -> str: """Search common environment variables for `lib_path`. We'll use a single approach here: @@ -643,7 +633,7 @@ def search_environment_for_lib(lib_path: Text) -> Text: return realpath(lib_path) -def get_prefix_stripper(strip_prefix: Text) -> Callable[[Text], Text]: +def get_prefix_stripper(strip_prefix: str) -> Callable[[str], str]: """Return function to strip `strip_prefix` prefix from string if present. Parameters @@ -659,13 +649,13 @@ def get_prefix_stripper(strip_prefix: Text) -> Callable[[Text], Text]: """ n = len(strip_prefix) - def stripper(path: Text) -> Text: + def stripper(path: str) -> str: return path if not path.startswith(strip_prefix) else path[n:] return stripper -def get_rp_stripper(strip_path: Text) -> Callable[[Text], Text]: +def get_rp_stripper(strip_path: str) -> Callable[[str], str]: """Return function to strip ``realpath`` of `strip_path` from string. 
Parameters @@ -684,8 +674,8 @@ def get_rp_stripper(strip_path: Text) -> Callable[[Text], Text]: def stripped_lib_dict( - lib_dict: Dict[Text, Dict[Text, Text]], strip_prefix: Text -) -> Dict[Text, Dict[Text, Text]]: + lib_dict: dict[str, dict[str, str]], strip_prefix: str +) -> dict[str, dict[str, str]]: """Return `lib_dict` with `strip_prefix` removed from start of paths. Use to give form of `lib_dict` that appears relative to some base path @@ -720,10 +710,10 @@ def stripped_lib_dict( def wheel_libs( wheel_fname: str, - filt_func: Optional[Callable[[Text], bool]] = None, + filt_func: Callable[[str], bool] | None = None, *, ignore_missing: bool = False, -) -> Dict[Text, Dict[Text, Text]]: +) -> dict[str, dict[str, str]]: """Return analysis of library dependencies with a Python wheel. Use this routine for a dump of the dependency tree. @@ -764,7 +754,7 @@ def wheel_libs( return stripped_lib_dict(lib_dict, realpath(tmpdir) + os.path.sep) -def _paths_from_var(varname: str, lib_basename: str) -> List[str]: +def _paths_from_var(varname: str, lib_basename: str) -> list[str]: var = os.environ.get(varname) if var is None: return [] diff --git a/delocate/pkginfo.py b/delocate/pkginfo.py index e8b30129..4fb7c46e 100644 --- a/delocate/pkginfo.py +++ b/delocate/pkginfo.py @@ -3,27 +3,28 @@ This is based on a copy of the old wheel.pkginfo module. """ # noqa: E501 +from __future__ import annotations + from email.generator import Generator from email.message import Message from email.parser import Parser from os import PathLike -from typing import Union -def read_pkg_info_bytes(bytestr: Union[bytes, str]) -> Message: +def read_pkg_info_bytes(bytestr: bytes | str) -> Message: """Parse a PKG-INFO or METADATA data string.""" if isinstance(bytestr, bytes): bytestr = bytestr.decode("utf-8") return Parser().parsestr(bytestr) -def read_pkg_info(path: Union[bytes, str, PathLike]) -> Message: +def read_pkg_info(path: bytes | str | PathLike) -> Message: """Read a PKG-INFO or METADATA file.""" with open(path, encoding="utf-8") as headers: return Parser().parse(headers) -def write_pkg_info(path: Union[bytes, str, PathLike], message: Message) -> None: +def write_pkg_info(path: bytes | str | PathLike, message: Message) -> None: """Write to a PKG-INFO or METADATA file.""" with open(path, "w", encoding="utf-8") as out: Generator(out, mangle_from_=False, maxheaderlen=0).flatten(message) diff --git a/delocate/tests/conftest.py b/delocate/tests/conftest.py index a6f7540d..570aa247 100644 --- a/delocate/tests/conftest.py +++ b/delocate/tests/conftest.py @@ -1,8 +1,8 @@ """Pytest configuration script.""" import os +from collections.abc import Iterator from pathlib import Path -from typing import Iterator import pytest diff --git a/delocate/tests/env_tools.py b/delocate/tests/env_tools.py index 092d8438..0d2e1c16 100644 --- a/delocate/tests/env_tools.py +++ b/delocate/tests/env_tools.py @@ -1,8 +1,8 @@ """Context managers for working with environment variables.""" import os +from collections.abc import Iterator from contextlib import contextmanager -from typing import Iterator from ..tmpdirs import InTemporaryDirectory diff --git a/delocate/tests/test_delocating.py b/delocate/tests/test_delocating.py index 7a565c7a..1a8ba1b6 100644 --- a/delocate/tests/test_delocating.py +++ b/delocate/tests/test_delocating.py @@ -1,15 +1,14 @@ """Tests for relocating libraries.""" -from __future__ import division, print_function - import os import shutil import subprocess import sys from collections import namedtuple +from collections.abc 
import Iterable from os.path import basename, dirname, realpath, relpath, splitext from os.path import join as pjoin -from typing import Any, Callable, Dict, Iterable, List, Set, Text, Tuple +from typing import Any, Callable import pytest from packaging.utils import InvalidWheelFilename @@ -100,7 +99,7 @@ def without_system_libs(obj): "tree_libs_func", [tree_libs, tree_libs_from_directory] ) def test_delocate_tree_libs( - tree_libs_func: Callable[[str], Dict[Text, Dict[Text, Text]]], + tree_libs_func: Callable[[str], dict[str, dict[str, str]]], ) -> None: # Test routine to copy library dependencies into a local directory with InTemporaryDirectory() as tmpdir: @@ -192,9 +191,9 @@ def test_delocate_tree_libs( lib_dict2 = without_system_libs(tree_libs_func(subtree2)) copied2 = delocate_tree_libs(lib_dict2, copy_dir2, "/fictional") local_libs = [liba, libb, libc, slibc, test_lib, stest_lib] - rp_liba, rp_libb, rp_libc, rp_slibc, rp_test_lib, rp_stest_lib = [ + rp_liba, rp_libb, rp_libc, rp_slibc, rp_test_lib, rp_stest_lib = ( realpath(L) for L in local_libs - ] + ) exp_dict = { rp_libc: {rp_test_lib: libc}, rp_slibc: {rp_stest_lib: slibc}, @@ -218,7 +217,7 @@ def test_delocate_tree_libs( assert set(new_links) <= set(lib_inames) -def _copy_fixpath(files: Iterable[str], directory: str) -> List[str]: +def _copy_fixpath(files: Iterable[str], directory: str) -> list[str]: new_fnames = [] for fname in files: shutil.copy2(fname, directory) @@ -363,7 +362,7 @@ def test_copy_recurse_overwrite() -> None: ) # Filter system libs - def filt_func(libname: Text) -> bool: + def filt_func(libname: str) -> bool: return not libname.startswith("/usr/lib") os.makedirs("subtree") @@ -443,7 +442,7 @@ def lib_filt(filename: str) -> bool: delocate_path("subtree5", "deplibs5", lib_filt_func="invalid-str") -def _make_bare_depends() -> Tuple[Text, Text]: +def _make_bare_depends() -> tuple[str, str]: # Copy: # * liba.dylib to 'libs' dir, which is a dependency of libb.dylib # * libb.dylib to 'subtree' dir, as 'libb' (no extension). 
@@ -481,12 +480,12 @@ def test_delocate_path_dylibs() -> None: # Callable, dylibs only, does not inspect liba, bare_b = _make_bare_depends() - def func(fn: Text) -> bool: + def func(fn: str) -> bool: return fn.endswith(".dylib") assert_equal(delocate_path("subtree", "deplibs", func), {}) - def func(fn: Text) -> bool: + def func(fn: str) -> bool: return fn.endswith("libb") assert_equal( @@ -499,7 +498,7 @@ def func(fn: Text) -> bool: def test_check_archs() -> None: # Test utility to check architectures in copied_libs dict # No libs always OK - s0: Set[Any] = set() + s0: set[Any] = set() assert_equal(check_archs({}), s0) # One lib to itself OK lib_M1_M1 = {LIBM1: {LIBM1: "install_name"}} diff --git a/delocate/tests/test_fuse.py b/delocate/tests/test_fuse.py index 4e76fa04..9ce86eae 100644 --- a/delocate/tests/test_fuse.py +++ b/delocate/tests/test_fuse.py @@ -48,7 +48,7 @@ def test_fuse_trees(): os.mkdir("tree2") fuse_trees("tree1", "tree2") assert_listdir_equal("tree1", []) - with open(pjoin("tree2", "afile.txt"), "wt") as fobj: + with open(pjoin("tree2", "afile.txt"), "w") as fobj: fobj.write("Some text") fuse_trees("tree1", "tree2") assert_listdir_equal("tree1", ["afile.txt"]) @@ -78,7 +78,7 @@ def test_fuse_trees(): assert_equal(get_archs(fused_fname), {"arm64", "x86_64"}) os.unlink(fused_fname) # A file not present in tree2 stays in tree1 - with open(pjoin("tree1", "anotherfile.txt"), "wt") as fobj: + with open(pjoin("tree1", "anotherfile.txt"), "w") as fobj: fobj.write("Some more text") fuse_trees("tree1", "tree2") assert_listdir_equal( diff --git a/delocate/tests/test_install_names.py b/delocate/tests/test_install_names.py index 01d55129..748f2245 100644 --- a/delocate/tests/test_install_names.py +++ b/delocate/tests/test_install_names.py @@ -1,20 +1,17 @@ """Tests for install name utilities.""" +from __future__ import annotations + import contextlib import os import shutil import sys +from collections.abc import Sequence from os.path import basename, dirname, exists from os.path import join as pjoin from subprocess import CompletedProcess from typing import ( - ContextManager, - Dict, NamedTuple, - Sequence, - Tuple, - Type, - Union, ) from unittest import mock @@ -198,7 +195,7 @@ def _copy_libs(lib_files, out_path): def assert_raises_if_exception( exception: object, -) -> ContextManager[object]: +) -> contextlib.AbstractContextManager[object]: """Return a pytest.raises context if `exception` is an Exception type.""" if isinstance(exception, type) and issubclass(exception, Exception): return pytest.raises(exception) @@ -208,11 +205,11 @@ def assert_raises_if_exception( class ToolArchMock(NamedTuple): """A group of expectations and mocks for otool-based function tests.""" - commands: Dict[Tuple[str, ...], str] # {command: stdout} + commands: dict[tuple[str, ...], str] # {command: stdout} "Subprocess commands and their expected stdout for mocking." - expected_install_names: Union[Sequence[str], Type[Exception]] + expected_install_names: Sequence[str] | type[Exception] "The expected return result of get_install_names." - expected_rpaths: Union[Sequence[str], Type[Exception]] + expected_rpaths: Sequence[str] | type[Exception] "The expected return result of get_rpaths." 
def mock_subprocess_run( diff --git a/delocate/tests/test_libsana.py b/delocate/tests/test_libsana.py index 037671bd..f0480772 100644 --- a/delocate/tests/test_libsana.py +++ b/delocate/tests/test_libsana.py @@ -7,9 +7,9 @@ import shutil import subprocess import sys +from collections.abc import Iterable from os.path import dirname, realpath, relpath, split from os.path import join as pjoin -from typing import Dict, Iterable, Text from unittest import mock import pytest @@ -47,7 +47,7 @@ from .test_wheelies import PLAT_WHEEL, PURE_WHEEL, RPATH_WHEEL, PlatWheel -def get_ext_dict(local_libs: Iterable[Text]) -> Dict[Text, Dict[Text, Text]]: +def get_ext_dict(local_libs: Iterable[str]) -> dict[str, dict[str, str]]: ext_deps = {} for ext_lib in EXT_LIBS: lib_deps = {} @@ -79,7 +79,7 @@ def test_tree_libs() -> None: # default - no filtering assert tree_libs(tmpdir) == exp_dict - def filt(fname: Text) -> bool: + def filt(fname: str) -> bool: return fname.endswith(".dylib") exp_dict = get_ext_dict([liba, libb, libc]) @@ -372,8 +372,8 @@ def test_get_rp_stripper() -> None: def get_ext_dict_stripped( - local_libs: Iterable[Text], start_path: Text -) -> Dict[Text, Dict[Text, Text]]: + local_libs: Iterable[str], start_path: str +) -> dict[str, dict[str, str]]: ext_dict = {} for ext_lib in EXT_LIBS: lib_deps = {} diff --git a/delocate/tests/test_scripts.py b/delocate/tests/test_scripts.py index 1b20c2f0..6062e65e 100644 --- a/delocate/tests/test_scripts.py +++ b/delocate/tests/test_scripts.py @@ -16,7 +16,6 @@ from os.path import basename, exists, realpath, splitext from os.path import join as pjoin from pathlib import Path -from typing import Text import pytest from pytest_console_scripts import ScriptRunner @@ -335,11 +334,11 @@ def test_fix_wheel_archs(script_runner: ScriptRunner) -> None: # Broken with one architecture removed archs = set(("x86_64", "arm64")) - def _fix_break(arch: Text) -> None: + def _fix_break(arch: str) -> None: _fixed_wheel(tmpdir) _thin_lib(stray_lib, arch) - def _fix_break_fix(arch: Text) -> None: + def _fix_break_fix(arch: str) -> None: _fixed_wheel(tmpdir) _thin_lib(stray_lib, arch) _thin_mod(fixed_wheel, arch) diff --git a/delocate/tests/test_tmpdirs.py b/delocate/tests/test_tmpdirs.py index 0118dc6c..f7041ed1 100644 --- a/delocate/tests/test_tmpdirs.py +++ b/delocate/tests/test_tmpdirs.py @@ -1,7 +1,5 @@ """Test tmpdirs module.""" -from __future__ import absolute_import, division, print_function - from os import getcwd from os.path import abspath, dirname, isfile, realpath diff --git a/delocate/tests/test_tools.py b/delocate/tests/test_tools.py index 70ab4c34..4c6d6ad4 100644 --- a/delocate/tests/test_tools.py +++ b/delocate/tests/test_tools.py @@ -1,7 +1,5 @@ """Test tools module.""" -from __future__ import division, print_function - import os import shutil import stat @@ -82,13 +80,13 @@ def test_ensure_permissions(): ("test.read", "A line\n"), ("test.write", "B line"), ): - with open(fname, "wt") as fobj: + with open(fname, "w") as fobj: fobj.write(contents) os.chmod(fname, 0) sts[fname] = chmod_perms(fname) def read_file(fname): - with open(fname, "rt") as fobj: + with open(fname) as fobj: contents = fobj.read() return contents @@ -96,7 +94,7 @@ def read_file(fname): non_read_file = ensure_permissions(stat.S_IWUSR)(read_file) def write_file(fname, contents): - with open(fname, "wt") as fobj: + with open(fname, "w") as fobj: fobj.write(contents) fixed_write_file = ensure_permissions(stat.S_IWUSR)(write_file) @@ -126,7 +124,7 @@ def write_file(fname, contents): def 
test_ensure_writable(): # Test ensure writable decorator with InTemporaryDirectory(): - with open("test.bin", "wt") as fobj: + with open("test.bin", "w") as fobj: fobj.write("A line\n") # Set to user rw, else r os.chmod("test.bin", 0o644) @@ -170,7 +168,7 @@ def test_parse_install_name() -> None: def _write_file(filename, contents): - with open(filename, "wt") as fobj: + with open(filename, "w") as fobj: fobj.write(contents) diff --git a/delocate/tests/test_wheelies.py b/delocate/tests/test_wheelies.py index 06e09a4d..9f17680b 100644 --- a/delocate/tests/test_wheelies.py +++ b/delocate/tests/test_wheelies.py @@ -43,7 +43,7 @@ def _collect_wheel(globber): glob_path = pjoin(DATA_PATH, globber) wheels = glob(glob_path) if len(wheels) == 0: - raise ValueError("No wheels for glob {}".format(glob_path)) + raise ValueError(f"No wheels for glob {glob_path}") elif len(wheels) > 1: raise ValueError( "Too many wheels for glob {} ({})".format( @@ -341,7 +341,7 @@ def test_patch_wheel() -> None: out_fname = basename(PURE_WHEEL) patch_wheel(PURE_WHEEL, WHEEL_PATCH, out_fname) zip2dir(out_fname, "wheel1") - with open(pjoin("wheel1", "fakepkg2", "__init__.py"), "rt") as fobj: + with open(pjoin("wheel1", "fakepkg2", "__init__.py")) as fobj: assert fobj.read() == 'print("Am in init")\n' # Check that wheel unpack works subprocess.run( @@ -350,13 +350,13 @@ def test_patch_wheel() -> None: # Copy the original, check it doesn't have patch shutil.copyfile(PURE_WHEEL, "copied.whl") zip2dir("copied.whl", "wheel2") - with open(pjoin("wheel2", "fakepkg2", "__init__.py"), "rt") as fobj: + with open(pjoin("wheel2", "fakepkg2", "__init__.py")) as fobj: assert fobj.read() == '"""Fake package."""\n' # Overwrite input wheel (the default) patch_wheel("copied.whl", WHEEL_PATCH) # Patched zip2dir("copied.whl", "wheel3") - with open(pjoin("wheel3", "fakepkg2", "__init__.py"), "rt") as fobj: + with open(pjoin("wheel3", "fakepkg2", "__init__.py")) as fobj: assert fobj.read() == 'print("Am in init")\n' # Check bad patch raises error with pytest.raises(RuntimeError): diff --git a/delocate/tests/test_wheeltools.py b/delocate/tests/test_wheeltools.py index 74ec0bf4..09699139 100644 --- a/delocate/tests/test_wheeltools.py +++ b/delocate/tests/test_wheeltools.py @@ -9,7 +9,7 @@ from os.path import basename, exists, isfile, realpath, splitext from os.path import join as pjoin from pathlib import Path -from typing import AnyStr, List, Tuple +from typing import AnyStr from zipfile import ZipFile import pytest @@ -75,7 +75,7 @@ def test_rewrite_record(): assert_record_equal(record_orig, record_new) # Test that signature gets deleted sig_fname = pjoin("wheel", dist_info_sdir, "RECORD.jws") - with open(sig_fname, "wt") as fobj: + with open(sig_fname, "w") as fobj: fobj.write("something") rewrite_record("wheel") with open_readable(record_fname, "rt") as fobj: @@ -146,7 +146,7 @@ def get_info(wheel_path: str | os.PathLike[str]) -> Message: def assert_winfo_similar( wheel_path: str | os.PathLike[str], - expected: List[Tuple[str, str]], + expected: list[tuple[str, str]], drop_version: bool = True, ) -> None: """Assert `wheel_path` has `.dist-info/WHEEL` items matching `expected`. 
diff --git a/delocate/tmpdirs.py b/delocate/tmpdirs.py index c828f390..ded05327 100644 --- a/delocate/tmpdirs.py +++ b/delocate/tmpdirs.py @@ -8,14 +8,12 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Contexts for *with* statement providing temporary directories.""" -from __future__ import absolute_import, division, print_function - import os import shutil from tempfile import mkdtemp, template -class TemporaryDirectory(object): +class TemporaryDirectory: r"""Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. @@ -75,15 +73,15 @@ def __enter__(self): """Chdir to the managed directory and then return its path.""" self._pwd = os.getcwd() os.chdir(self.name) - return super(InTemporaryDirectory, self).__enter__() + return super().__enter__() def __exit__(self, exc, value, tb): """Revert the working directory then delete the managed directory.""" os.chdir(self._pwd) - return super(InTemporaryDirectory, self).__exit__(exc, value, tb) + return super().__exit__(exc, value, tb) -class InGivenDirectory(object): +class InGivenDirectory: """Change directory to given directory for duration of ``with`` block. Useful when you want to use `InTemporaryDirectory` for the final test, but diff --git a/delocate/tools.py b/delocate/tools.py index 409950cb..f1bc16e3 100644 --- a/delocate/tools.py +++ b/delocate/tools.py @@ -10,6 +10,7 @@ import time import warnings import zipfile +from collections.abc import Iterable, Sequence from datetime import datetime from os import PathLike from os.path import exists, isdir @@ -17,16 +18,7 @@ from pathlib import Path from typing import ( Any, - Dict, - FrozenSet, - Iterable, - List, - Optional, - Sequence, - Set, - Tuple, TypeVar, - Union, ) T = TypeVar("T") @@ -39,10 +31,10 @@ class InstallNameError(Exception): def back_tick( - cmd: Union[str, Sequence[str]], + cmd: str | Sequence[str], ret_err: bool = False, as_str: bool = True, - raise_err: Optional[bool] = None, + raise_err: bool | None = None, ) -> Any: """Run command `cmd`, return stdout, or stdout, stderr if `ret_err`. @@ -90,9 +82,8 @@ def back_tick( proc = subprocess.run( cmd, shell=not cmd_is_seq, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=as_str, + capture_output=True, + text=as_str, check=raise_err, ) except subprocess.CalledProcessError as exc: @@ -142,8 +133,7 @@ def _run( try: return subprocess.run( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + capture_output=True, text=True, check=check, ) @@ -249,7 +239,7 @@ def modify(filename, *args, **kwargs): ) -def parse_install_name(line: str) -> Tuple[str, str, str]: +def parse_install_name(line: str) -> tuple[str, str, str]: """Parse a line of install name output. Parameters @@ -299,7 +289,7 @@ def parse_install_name(line: str) -> Tuple[str, str, str]: """ -def _parse_otool_listing(stdout: str) -> Dict[str, List[str]]: +def _parse_otool_listing(stdout: str) -> dict[str, list[str]]: '''Parse the output of otool lists. Parameters @@ -348,14 +338,14 @@ def _parse_otool_listing(stdout: str) -> Dict[str, List[str]]: RuntimeError: Input has duplicate architectures for ... ''' # noqa: D301 stdout = stdout.strip() - out: Dict[str, List[str]] = {} + out: dict[str, list[str]] = {} lines = stdout.split("\n") while lines: # Detect and parse the name/arch header line. 
match_arch = _OTOOL_ARCHITECTURE_RE.match(lines.pop(0)) if not match_arch: raise RuntimeError(f"Missing file/architecture header:\n{stdout}") - current_arch: Optional[str] = match_arch["architecture"] + current_arch: str | None = match_arch["architecture"] if current_arch is None: current_arch = "" if current_arch in out: @@ -370,7 +360,7 @@ def _parse_otool_listing(stdout: str) -> Dict[str, List[str]]: return out -def _check_ignore_archs(input: Dict[str, T]) -> T: +def _check_ignore_archs(input: dict[str, T]) -> T: """Merge architecture outputs for functions which don't support multiple. This is used to maintain backward compatibility inside of functions which @@ -419,7 +409,7 @@ def _check_ignore_archs(input: Dict[str, T]) -> T: def _parse_otool_install_names( stdout: str, -) -> Dict[str, List[Tuple[str, str, str]]]: +) -> dict[str, list[tuple[str, str, str]]]: '''Parse the stdout of 'otool -L' and return. Parameters @@ -452,7 +442,7 @@ def _parse_otool_install_names( ... """) {'': [('/usr/lib/libc++.1.dylib', '1.0.0', '905.6.0'), ('/usr/lib/libSystem.B.dylib', '1.0.0', '1292.100.5')]} ''' # noqa: E501, D301 - out: Dict[str, List[Tuple[str, str, str]]] = {} + out: dict[str, list[tuple[str, str, str]]] = {} for arch, install_names in _parse_otool_listing(stdout).items(): out[arch] = [parse_install_name(name) for name in install_names] return out @@ -522,11 +512,11 @@ def _line0_says_object(stdout_stderr: str, filename: str) -> bool: if further_report == "": return True raise InstallNameError( - 'Too ignorant to know what "{0}" means'.format(further_report) + f'Too ignorant to know what "{further_report}" means' ) -def get_install_names(filename: str) -> Tuple[str, ...]: +def get_install_names(filename: str) -> tuple[str, ...]: """Return install names from library named in `filename`. Returns tuple of install names. @@ -567,7 +557,7 @@ def get_install_names(filename: str) -> Tuple[str, ...]: return tuple(names) -def get_install_id(filename: str) -> Optional[str]: +def get_install_id(filename: str) -> str | None: """Return install id from library named in `filename`. Returns None if no install id, or if this is not an object file. @@ -593,7 +583,7 @@ def get_install_id(filename: str) -> Optional[str]: return _check_ignore_archs(install_ids) -def _get_install_ids(filename: str) -> Dict[str, str]: +def _get_install_ids(filename: str) -> dict[str, str]: """Return the install ids of a library. Parameters @@ -651,9 +641,7 @@ def set_install_name( """ names = get_install_names(filename) if oldname not in names: - raise InstallNameError( - "{0} not in install names for {1}".format(oldname, filename) - ) + raise InstallNameError(f"{oldname} not in install names for {filename}") _run( ["install_name_tool", "-change", oldname, newname, filename], check=True ) @@ -681,7 +669,7 @@ def set_install_id(filename: str, install_id: str, ad_hoc_sign: bool = True): RuntimeError if `filename` has not install id """ if get_install_id(filename) is None: - raise InstallNameError("{0} has no install id".format(filename)) + raise InstallNameError(f"{filename} has no install id") _run(["install_name_tool", "-id", install_id, filename], check=True) if ad_hoc_sign: replace_signature(filename, "-") @@ -690,7 +678,7 @@ def set_install_id(filename: str, install_id: str, ad_hoc_sign: bool = True): RPATH_RE = re.compile(r"path (?P.*) \(offset \d+\)") -def _parse_otool_rpaths(stdout: str) -> Dict[str, List[str]]: +def _parse_otool_rpaths(stdout: str) -> dict[str, list[str]]: '''Return the rpaths of the library `filename`. 
Parameters @@ -730,7 +718,7 @@ def _parse_otool_rpaths(stdout: str) -> Dict[str, List[str]]: ... """) {'x86_64': ['path/x86_64'], 'arm64': ['path/arm64']} ''' - rpaths: Dict[str, List[str]] = {} + rpaths: dict[str, list[str]] = {} for arch, lines in _parse_otool_listing(stdout).items(): rpaths[arch] = [] line_no = 0 @@ -748,7 +736,7 @@ def _parse_otool_rpaths(stdout: str) -> Dict[str, List[str]]: return rpaths -def get_rpaths(filename: str) -> Tuple[str, ...]: +def get_rpaths(filename: str) -> tuple[str, ...]: """Return a tuple of rpaths from the library `filename`. If `filename` is not a library then the returned tuple will be empty. @@ -933,12 +921,12 @@ def zip2dir( _ZIP_TIMESTAMP_MIN = 315532800 # 1980-01-01 00:00:00 UTC -_DateTuple = Tuple[int, int, int, int, int, int] +_DateTuple = tuple[int, int, int, int, int, int] def _get_zip_datetime( - date_time: Optional[_DateTuple] = None, -) -> Optional[_DateTuple]: + date_time: _DateTuple | None = None, +) -> _DateTuple | None: """Return ``SOURCE_DATE_EPOCH`` if set, otherwise return `date_time`. https://reproducible-builds.org/docs/source-date-epoch/ @@ -967,7 +955,7 @@ def dir2zip( *, compression: int = zipfile.ZIP_DEFLATED, compress_level: int = -1, - date_time: Optional[_DateTuple] = None, + date_time: _DateTuple | None = None, ) -> None: """Make a zip file `zip_fname` with contents of directory `in_dir`. @@ -1014,7 +1002,7 @@ def dir2zip( ) -def find_package_dirs(root_path: str) -> Set[str]: +def find_package_dirs(root_path: str) -> set[str]: """Find python package directories in directory `root_path`. Parameters @@ -1059,7 +1047,7 @@ def cmp_contents(filename1, filename2): return contents1 == contents2 -def get_archs(libname: str) -> FrozenSet[str]: +def get_archs(libname: str) -> frozenset[str]: """Return architecture types from library `libname`. 
Parameters @@ -1082,21 +1070,19 @@ def get_archs(libname: str) -> FrozenSet[str]: return frozenset() lines = [line.strip() for line in stdout.split("\n") if line.strip()] # For some reason, output from lipo -info on .a file generates this line - if lines[0] == "input file {0} is not a fat file".format(libname): + if lines[0] == f"input file {libname} is not a fat file": line = lines[1] else: assert len(lines) == 1 line = lines[0] for reggie in ( - "Non-fat file: {0} is architecture: (.*)".format(re.escape(libname)), - "Architectures in the fat file: {0} are: (.*)".format( - re.escape(libname) - ), + f"Non-fat file: {re.escape(libname)} is architecture: (.*)", + f"Architectures in the fat file: {re.escape(libname)} are: (.*)", ): match = re.match(reggie, line) if match is not None: return frozenset(match.groups()[0].split(" ")) - raise ValueError("Unexpected output: '{0}' for {1}".format(stdout, libname)) + raise ValueError(f"Unexpected output: '{stdout}' for {libname}") def lipo_fuse( diff --git a/delocate/wheeltools.py b/delocate/wheeltools.py index 82fd9a46..ec567416 100644 --- a/delocate/wheeltools.py +++ b/delocate/wheeltools.py @@ -11,12 +11,13 @@ import hashlib import os import sys +from collections.abc import Iterable from itertools import product from os import PathLike from os.path import abspath, basename, dirname, exists, relpath, splitext from os.path import join as pjoin from os.path import sep as psep -from typing import Iterable, Optional, Union, overload +from typing import overload from packaging.utils import parse_wheel_filename @@ -74,12 +75,12 @@ def skip(path): relative_path = relpath(path, bdist_dir) if skip(relative_path): hash = "" - size: Union[int, str] = "" + size: int | str = "" else: with open(path, "rb") as f: data = f.read() digest = hashlib.sha256(data).digest() - hash = "sha256=%s" % ( + hash = "sha256={}".format( base64.urlsafe_b64encode(digest).decode("ascii").strip("=") ) size = len(data) @@ -111,7 +112,7 @@ def __init__(self, in_wheel, out_wheel=None, ret_self=False): """ self.in_wheel = abspath(in_wheel) self.out_wheel = None if out_wheel is None else abspath(out_wheel) - super(InWheel, self).__init__() + super().__init__() def __enter__(self): """Unpack a wheel and return the path to its temporary directly. @@ -119,7 +120,7 @@ def __enter__(self): Will also chdir to the temporary directory. """ zip2dir(self.in_wheel, self.name) - return super(InWheel, self).__enter__() + return super().__enter__() def __exit__(self, exc, value, tb): """Write out the wheel based on the value of `out_wheel`, then cleanup. @@ -129,7 +130,7 @@ def __exit__(self, exc, value, tb): if self.out_wheel is not None: rewrite_record(self.name) dir2zip(self.name, self.out_wheel) - return super(InWheel, self).__exit__(exc, value, tb) + return super().__exit__(exc, value, tb) class InWheelCtx(InWheel): @@ -159,7 +160,7 @@ def __init__(self, in_wheel, out_wheel=None): filename of wheel to write after exiting. If None, don't write and discard """ - super(InWheelCtx, self).__init__(in_wheel, out_wheel) + super().__init__(in_wheel, out_wheel) self.wheel_path = None def __enter__(self): @@ -168,7 +169,7 @@ def __enter__(self): Will also chdir to the temporary directory. 
""" - self.wheel_path = super(InWheelCtx, self).__enter__() + self.wheel_path = super().__enter__() return self @@ -193,9 +194,9 @@ def add_platforms( def add_platforms( in_wheel: str, platforms: Iterable[str], - out_path: Optional[str] = None, + out_path: str | None = None, clobber: bool = False, -) -> Optional[str]: +) -> str | None: """Add platform tags `platforms` to `in_wheel` filename and WHEEL tags. Add any platform tags in `platforms` that are missing from `in_wheel` @@ -235,9 +236,7 @@ def add_platforms( out_wheel = pjoin(out_path, out_wheel_base + ext) if exists(out_wheel) and not clobber: raise WheelToolsError( - "Not overwriting {0}; set clobber=True to overwrite".format( - out_wheel - ) + f"Not overwriting {out_wheel}; set clobber=True to overwrite" ) with InWheelCtx(in_wheel) as ctx: info = read_pkg_info(info_fname) diff --git a/pyproject.toml b/pyproject.toml index bd7f6d5a..714b26d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,11 +69,13 @@ line-length = 80 [tool.ruff.lint] # https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle - "F", # Pyflakes - "I", # isort - "D", # pydocstyle + "E", # pycodestyle + "W", # pycodestyle + "F", # Pyflakes + "I", # isort + "D", # pydocstyle + "UP", # pyupgrade + "FA", # flake8-future-annotations ] ignore = [ "ANN101", # missing-type-self From d2115f415998fb9929db8877513a64acf4990267 Mon Sep 17 00:00:00 2001 From: Kyle Benesch <4b796c65+github@gmail.com> Date: Thu, 29 Aug 2024 16:43:24 -0700 Subject: [PATCH 4/5] Add dropped Python versions to changelog --- Changelog.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Changelog.md b/Changelog.md index d6984b9c..dd72765b 100644 --- a/Changelog.md +++ b/Changelog.md @@ -10,6 +10,11 @@ rules on making a good Changelog. ## [Unreleased] +### Removed + +- Dropped support for Python 3.7 and Python 3.8. + [#226](https://github.com/matthew-brett/delocate/pull/226) + ## [0.12.0] - 2024-08-29 ### Added From 85e4719537a0e1865654a942b0a3e017082ec815 Mon Sep 17 00:00:00 2001 From: Kyle Benesch <4b796c65+github@gmail.com> Date: Mon, 14 Oct 2024 23:24:42 -0700 Subject: [PATCH 5/5] Refactor patch_wheel function Remove `raise ValueError` on missing patch file. This was always extraneous since the open call would've raised `FileNotFoundError` on its own. Normally I wouldn't add an undocumented raise to the changelog, but I'm being safe. Convert to pathlib and use str types for Popen. --- Changelog.md | 5 +++++ delocate/delocating.py | 31 +++++++++++++------------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/Changelog.md b/Changelog.md index dd72765b..771da881 100644 --- a/Changelog.md +++ b/Changelog.md @@ -10,6 +10,11 @@ rules on making a good Changelog. ## [Unreleased] +### Changed + +- `patch_wheel` function raises `FileNotFoundError` instead of `ValueError` on + missing patch files. + ### Removed - Dropped support for Python 3.7 and Python 3.8. diff --git a/delocate/delocating.py b/delocate/delocating.py index ca1e652a..dd755551 100644 --- a/delocate/delocating.py +++ b/delocate/delocating.py @@ -1092,7 +1092,9 @@ def delocate_wheel( def patch_wheel( - in_wheel: str, patch_fname: str, out_wheel: str | None = None + in_wheel: str | os.PathLike[str], + patch_fname: str | os.PathLike[str], + out_wheel: str | os.PathLike[str] | None = None, ) -> None: """Apply ``-p1`` style patch in `patch_fname` to contents of `in_wheel`. 
@@ -1101,32 +1103,25 @@ def patch_wheel( Parameters ---------- - in_wheel : str + in_wheel : str or PathLike Filename of wheel to process - patch_fname : str + patch_fname : str or PathLike Filename of patch file. Will be applied with ``patch -p1 < patch_fname`` - out_wheel : None or str + out_wheel : None or str or PathLike Filename of patched wheel to write. If None, overwrite `in_wheel` """ - in_wheel = abspath(in_wheel) - patch_fname = abspath(patch_fname) - if out_wheel is None: - out_wheel = in_wheel - else: - out_wheel = abspath(out_wheel) - if not exists(patch_fname): - raise ValueError(f"patch file {patch_fname} does not exist") + in_wheel = Path(in_wheel).resolve(strict=True) + patch_fname = Path(patch_fname).resolve(strict=True) + out_wheel = Path(out_wheel).resolve() if out_wheel is not None else in_wheel with InWheel(in_wheel, out_wheel): - with open(patch_fname, "rb") as fobj: + with open(patch_fname, "rb") as f: patch_proc = Popen( - ["patch", "-p1"], stdin=fobj, stdout=PIPE, stderr=PIPE + ["patch", "-p1"], stdin=f, stdout=PIPE, stderr=PIPE, text=True ) - stdout, stderr = patch_proc.communicate() + stdout, _stderr = patch_proc.communicate() if patch_proc.returncode != 0: - raise RuntimeError( - "Patch failed with stdout:\n" + stdout.decode("latin1") - ) + raise RuntimeError(f"Patch failed with stdout:\n{stdout}") _ARCH_LOOKUP = {"intel": ["i386", "x86_64"], "universal2": ["x86_64", "arm64"]}
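
A minimal usage sketch of the refactored `patch_wheel` from the final patch above. The file names are hypothetical (loosely modeled on the fakepkg2 test fixtures), and the error-handling shown reflects the behavior change recorded in the changelog entry, not a documented API guarantee:

from pathlib import Path

from delocate.delocating import patch_wheel

# Hypothetical inputs, for illustration only.
wheel = Path("fakepkg2-1.0-py3-none-any.whl")
patch_file = Path("fakepkg2.patch")

try:
    # Applies the patch with ``patch -p1`` inside the unpacked wheel and,
    # with out_wheel omitted, rewrites the input wheel in place.
    patch_wheel(wheel, patch_file)
except FileNotFoundError:
    # After this refactor a missing wheel or patch file surfaces as
    # FileNotFoundError (raised by Path.resolve(strict=True)) instead of
    # the old explicit ValueError.
    print(f"missing input: {wheel} or {patch_file}")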