diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b5830615a5..d393c3c415 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,11 +50,6 @@ jobs:
     steps:
       - uses: actions/checkout@v3
-      - name: Set Python 2.7
-        uses: actions/setup-python@v3
-        with:
-          python-version: 2.7
-          architecture: ${{ matrix.arch }}
       - name: Set Python 3.6
         uses: actions/setup-python@v3
         with:
@@ -78,6 +73,12 @@ jobs:
         with:
           python-version: 3.9
           architecture: ${{ matrix.arch }}
+      - name: Set Python 3.10
+        uses: actions/setup-python@v3
+        if: matrix.python-version != '3.10'
+        with:
+          python-version: "3.10"
+          architecture: ${{ matrix.arch }}
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v3
         with:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d695ed686b..579f0b544b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,6 @@ repos:
     rev: 22.3.0
     hooks:
       - id: black
-        exclude: ^pdm/_vendor

  - repo: https://github.com/PyCQA/flake8
    rev: 4.0.1
@@ -26,5 +25,4 @@ repos:
        args: [pdm]
        pass_filenames: false
        additional_dependencies:
-          - types-setuptools
-          - types-toml
+          - types-requests
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 125cd63dce..ec307f023d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -75,6 +75,20 @@ pdm run test

The test suite is still simple and needs expansion! Please help write more test cases.

+!!! note
+    You can also run your test suite against all supported Python versions using `tox` with the `tox-pdm` plugin.
+    You can either run it by yourself with:
+
+    ```shell
+    tox
+    ```
+
+    or from `pdm` with:
+
+    ```shell
+    pdm run tox
+    ```
+
### Code style

PDM uses `pre-commit` for linting. Install `pre-commit` first, then:

diff --git a/docs/docs/configuration.md b/docs/docs/configuration.md
index d6ae238968..77d05bb898 100644
--- a/docs/docs/configuration.md
+++ b/docs/docs/configuration.md
@@ -1,6 +1,6 @@
## Available Configurations

-The following configuration items can be retrieved and modified by `pdm config` command.
+The following configuration items can be retrieved and modified by the [`pdm config`](usage/cli_reference.md#exec-0--config) command.
| Config Item | Description | Default Value | Available in Project | Env var |
| ----------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | -------------------- | ------------------------ |
@@ -8,8 +8,8 @@ The following configuration items can be retrieved and modified by `pdm config`
| `cache_dir` | The root directory of cached files | The default cache location on OS | No | |
| `check_update` | Check if there is any newer version available | True | No | |
| `global_project.fallback` | Use the global project implicitly if no local project is found | `False` | No | |
-| `global_project.path` | The path to the global project | `~/.pdm/global-project` | No | |
-| `global_project.user_site` | Whether to install to user site | `False` | No | |
+| `global_project.path` | The path to the global project | `<CONFIG_ROOT>/global-project` | No | |
+| `global_project.user_site` | Whether to install to user site | `False` | No | |
| `install.cache` | Enable caching of wheel installations | False | Yes | |
| `install.cache_method` | Specify how to create links to the caches (`symlink` or `pth`) | `symlink` | Yes | |
| `install.parallel` | Whether to perform installation and uninstallation in parallel | `True` | Yes | `PDM_PARALLEL_INSTALL` |
diff --git a/docs/docs/pyproject/build.md b/docs/docs/pyproject/build.md
new file mode 100644
index 0000000000..1539e1fef4
--- /dev/null
+++ b/docs/docs/pyproject/build.md
@@ -0,0 +1,249 @@
+# Build Configuration
+
+`pdm` uses [PEP 517](https://www.python.org/dev/peps/pep-0517/) to build the package.
+A build backend is what drives the build system to build source distributions and wheels from arbitrary source trees.
+
+`pdm` also ships with its own build backend, [`pdm-pep517`](https://pypi.org/project/pdm-pep517/). Besides the [PEP 621 project meta](pep621.md), it also reads some additional configurations to control the build behavior. To use it, include the following in your `pyproject.toml` (this is done automatically if you use [`pdm init`](../usage/cli_reference.md#exec-0--init) or [`pdm import`](../usage/cli_reference.md#exec-0--import) to create the file):
+
+```toml
+[build-system]
+requires = ["pdm-pep517"]
+build-backend = "pdm.pep517.api"
+```
+
+## Dynamic versioning
+
+`pdm-pep517` supports dynamic versions from two sources. To enable dynamic versioning, remember to include `version` in the `dynamic` field of the PEP 621 metadata:
+
+```toml
+[project]
+...
+dynamic = ["version"]
+```
+
+### Dynamic version from file
+
+```toml
+[tool.pdm]
+version = { source = "file", path = "mypackage/__version__.py" }
+```
+
+The backend will search for the pattern `__version__ = "{version}"` in the given file and use the value as the version.
+
+!!! TIP
+
+    Thanks to the TOML syntax, the above example is equivalent to the following:
+
+    ```toml
+    [tool.pdm.version]
+    source = "file"
+    path = "mypackage/__version__.py"
+    ```
+    Or:
+    ```toml
+    [tool.pdm]
+    version.source = "file"
+    version.path = "mypackage/__version__.py"
+    ```
+
+### Dynamic version from SCM
+
+If you've used [`setuptools-scm`](https://pypi.org/project/setuptools-scm/) you will be familiar with this approach. `pdm-pep517` can also read the version from the tag of your SCM repository:
+
+```toml
+[tool.pdm]
+version = { source = "scm" }
+```
+
+#### Specify the version manually
+
+When building the package, `pdm-pep517` will require the SCM to be available to populate the version.
If that is not the case, you can still specify the version via the `PDM_PEP517_SCM_VERSION` environment variable:
+
+```bash
+export PDM_PEP517_SCM_VERSION="1.2.3"
+pdm build
+```
+
+#### Write the version to file
+
+For a dynamic version read from SCM, it is helpful to write the evaluated value to a file when building a wheel, so that you do not need `importlib.metadata` to get the version in code.
+
+```toml
+[tool.pdm.version]
+source = "scm"
+write-to = "mypackage/__version__.py"
+write-template = "__version__ = '{}'"  # optional, defaults to "{}"
+```
+
+For source distributions, the version will be *frozen* and converted to a static version in the `pyproject.toml` file, which will be included in the distribution.
+
+
+## Include and exclude files
+
+To include extra files in and/or exclude files from the distribution, give the paths in the `includes` and `excludes` configuration, as glob patterns:
+
+```toml
+[tool.pdm.build]
+includes = [
+    "**/*.json",
+    "mypackage/",
+]
+excludes = [
+    "mypackage/_temp/*"
+]
+```
+
+If you want some files to be included in the sdist only, use the `source-includes` field:
+
+```toml
+[tool.pdm.build]
+includes = [...]
+excludes = [...]
+source-includes = ["tests/"]
+```
+
+Note that the files defined in `source-includes` will be **excluded** automatically from binary distributions.
+
+### Default values for includes and excludes
+
+If you don't specify any of these fields, PDM determines default values for you that fit the most common workflows:
+
+- Top-level packages will be included.
+- `tests` package will be excluded from **non-sdist** builds.
+- `src` directory will be detected as the `package-dir` if it exists.
+
+If your project follows the above conventions you don't need to configure any of these fields and it just works.
+Be aware PDM won't add [PEP 420 implicit namespace packages](https://www.python.org/dev/peps/pep-0420/) automatically and they should always be specified in `includes` explicitly.
+
+## Select another package directory to look for packages
+
+Similar to `setuptools`' `package_dir` setting, one can specify another package directory, such as `src`, in `pyproject.toml` easily:
+
+```toml
+[tool.pdm.build]
+package-dir = "src"
+```
+
+If no package directory is given, PDM can also recognize `src` as the `package-dir` implicitly if:
+
+1. `src/__init__.py` doesn't exist, meaning it is not a valid Python package, and
+2. There exist some packages under `src/*`.
+
+## Implicit namespace packages
+
+As specified in [PEP 420](https://www.python.org/dev/peps/pep-0420), a directory will be recognized as a namespace package if:
+
+1. `<package>/__init__.py` doesn't exist, and
+2. There exist normal packages and/or other namespace packages under `<package>/*`, and
+3. `<package>` is explicitly listed in `includes`
+
+## Custom file generation
+
+During the build, you may want to generate other files or download resources from the internet.
You can achieve this with the `setup-script` build configuration:
+
+```toml
+[tool.pdm.build]
+setup-script = "build.py"
+```
+
+In the `build.py` script, `pdm-pep517` looks for a `build` function and calls it with two arguments:
+
+- `src`: (str) the path to the source directory
+- `dst`: (str) the path to the distribution directory
+
+Example:
+
+```python
+# build.py
+import os
+
+
+def build(src, dst):
+    target_file = os.path.join(dst, "mypackage/myfile.txt")
+    os.makedirs(os.path.dirname(target_file), exist_ok=True)
+    # download_file_to() stands for whatever generates or fetches the file
+    download_file_to(target_file)
+```
+
+The generated file will be copied to the resulting wheel with the same hierarchy; you need to create the parent directories if necessary.
+
+## Build Platform-specific Wheels
+
+`setup-script` can also be used to build platform-specific wheels, such as C extensions. Currently, building C extensions still relies on `setuptools`.
+
+Set `run-setuptools` to `true` in addition to `setup-script`, and `pdm-pep517` will generate a `setup.py` with the custom `build` function in the script and run `python setup.py build` to build extensions.
+
+In the `build.py` script, the expected `build` function receives the argument dictionary to be passed to the `setup()` call. In the function, you can update the dictionary with any additional or changed values as you want.
+
+Here is an example adapted to build `MarkupSafe`:
+
+```python
+# build.py
+from setuptools import Extension
+
+ext_modules = [
+    Extension("markupsafe._speedups", ["src/markupsafe/_speedups.c"])
+]
+
+def build(setup_kwargs):
+    setup_kwargs.update(ext_modules=ext_modules)
+```
+
+The build configuration should look like:
+
+```toml
+# pyproject.toml
+[tool.pdm.build]
+setup-script = "build.py"
+run-setuptools = true
+```
+
+If you run [`pdm build`](../usage/cli_reference.md#exec-0--build) (or any other build frontend such as [build](https://pypi.org/project/build)), PDM will build a platform-specific wheel file as well as an sdist.
+
+By default, every build is performed in a clean and isolated environment where only the build requirements can be seen. If your build has optional requirements that depend on the project environment, you can turn off the environment isolation with `pdm build --no-isolation` or by setting the config `build_isolation` to a falsey value.
+
+### Override the "Is-Purelib" value
+
+Sometimes you may want to build platform-specific wheels but don't have a build script (the binaries may be built or fetched by other tools). In this case
+you can set the `is-purelib` value in the `pyproject.toml` to `false`:
+
+```toml
+[tool.pdm.build]
+is-purelib = false
+```
+
+If this value is not specified, `pdm-pep517` will build platform-specific wheels when `run-setuptools` is `true`; in other words, `is-purelib` defaults to the opposite of `run-setuptools`.
+
+## Editable build backend
+
+PDM implements [PEP 660](https://www.python.org/dev/peps/pep-0660/) to build wheels for editable installation.
+One can choose between two methods of generating the wheel:
+
+- `path`: (Default) The legacy method used by setuptools that creates `.pth` files under the packages path.
+- `editables`: Create proxy modules under the packages path. Since the proxy module is looked up at runtime, it may not work with some static analysis tools.
+
+Read the PEP for the differences between the two methods and how they work.
+
+Specify the method in `pyproject.toml` like below:
+
+```toml
+[tool.pdm.build]
+editable-backend = "path"
+```
+
+The `editables` backend is recommended, but there is a known limitation: it can't work with PEP 420 namespace packages.
So you would need to switch to `path` in that case.
+
+!!! note "About Python 2 compatibility"
+    Because the build backend for PDM-managed projects requires Python>=3.6, you will not be able to
+    install the current project if Python 2 is used as the host interpreter. You can still install other dependencies that are not PDM-backed.
+
+## Use other PEP 517 backends
+
+Apart from `pdm-pep517`, `pdm` plays well with any PEP 517 build backend that complies with the PEP 621 specification. At the time of writing, [`flit`](https://pypi.org/project/flit) (backend: `flit-core`) and [`hatch`](https://pypi.org/project/hatch) (backend: `hatchling`) work well with PEP 621, and [`setuptools`](https://pypi.org/project/setuptools) has experimental support. To use one of them, you can specify the backend in the `pyproject.toml`:
+
+```toml
+[build-system]
+requires = ["flit_core >=2,<4"]
+build-backend = "flit_core.buildapi"
+```
+
+PDM will call the correct backend when doing [`pdm build`](../usage/cli_reference.md#exec-0--build).

diff --git a/docs/docs/pyproject/pep621.md b/docs/docs/pyproject/pep621.md
index c6edfe238c..443bfc09d0 100644
--- a/docs/docs/pyproject/pep621.md
+++ b/docs/docs/pyproject/pep621.md
@@ -1,8 +1,10 @@
# PEP 621 Metadata

-The standard format and all metadata fields are defined by [PEP 621](https://www.python.org/dev/peps/pep-0621/) and
-[PEP 639](https://www.python.org/dev/peps/pep-0639/).
-View the PEPs for the detailed specification. These metadata are stored in `[project]` table of `pyproject.toml`.
+The project metadata is stored in `pyproject.toml`. The specifications are defined by [PEP 621], [PEP 631] and [PEP 639]. Read the detailed specifications in the PEPs.
+
+[PEP 621]: https://www.python.org/dev/peps/pep-0621/
+[PEP 631]: https://www.python.org/dev/peps/pep-0631/
+[PEP 639]: https://www.python.org/dev/peps/pep-0639/

_In the following part of this document, metadata should be written under the `[project]` table if not given explicitly._

@@ -24,32 +26,29 @@ description = """\

See [TOML's specification on strings](https://toml.io/en/v1.0.0#string).

-## Determine the package version dynamically
+## Package version

-The package version can be retrieved from the `__version__` variable of a given file. To do this, put the following
-under the `[tool.pdm]` table:
+=== "Static"

-```toml
-[tool.pdm]
-version = {from = "mypackage/__init__.py"}
-```
+    ```toml
+    [project]
+    version = "1.0.0"
+    ```

-Remember set `dynamic = ["version"]` in `[project]` metadata.
+=== "Dynamic"

-PDM can also read version from SCM tags. If you are using git or hg as the version control system, define the
-`version` as follows:
+    ```toml
+    [project]
+    ...
+    dynamic = ["version"]

-```toml
-[tool.pdm]
-version = {use_scm = true}
-```
+    [tool.pdm]
+    version = { source = "file", path = "mypackage/__version__.py" }
+    ```

-In either case, you MUST delete the `version` field from the `[project]` table, and include `version`
-in the `dynamic` field, or the backend will raise an error:
+    The version will be read from the `mypackage/__version__.py` file, searching for the pattern: `__version__ = "{version}"`.

-```toml
-dynamic = ["version"]
-```
+    Read more information about other configurations in [dynamic versioning](build.md#dynamic-versioning).

## Dependency specification

@@ -59,6 +58,8 @@ and [PEP 508](https://www.python.org/dev/peps/pep-0508/). Examples:

```toml
+[project]
+...
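# Each of the entries below is a standard PEP 508 requirement string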
dependencies = [
    # Named requirement
    "requests",
@@ -71,26 +72,6 @@ dependencies = [
]
```

-## Editable requirement
-
-Beside of the normal dependency specifications, one can also have some packages installed in editable mode. The editable specification string format
-is the same as [Pip's editable install mode](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs).
-
-Examples:
-
-```
-dependencies = [
-    ...,
-    "-e path/to/SomeProject",
-    "-e git+http://repo/my_project.git#egg=SomeProject"
-]
-```
-
-!!! note "About editable installation"
-    One can have editable installation and normal installation for the same package. The one that comes at last wins.
-    However, editable dependencies WON'T be included in the metadata of the built artifacts since they are not valid
-    PEP 508 strings. They only exist for development purpose.
-
## Optional dependencies

You can have some requirements optional, which is similar to `setuptools`' `extras_require` parameter.

diff --git a/docs/docs/pyproject/tool-pdm.md b/docs/docs/pyproject/tool-pdm.md
index 7c0cb605d9..aa558cbafb 100644
--- a/docs/docs/pyproject/tool-pdm.md
+++ b/docs/docs/pyproject/tool-pdm.md
@@ -1,6 +1,6 @@
# PDM Tool Settings

-There are also some useful settings to control the packaging behavior of PDM. They should be shipped with `pyproject.toml`, defined in `[tool.pdm]` table.
+There are also some useful settings to control the behaviors of PDM in various aspects. They should be stored in `pyproject.toml`, defined in the `[tool.pdm]` table.

## Development dependencies

@@ -17,13 +17,24 @@ test = ["pytest", "pytest-cov"]
doc = ["mkdocs"]
```

-To install all of them:
+Editable dependencies are also allowed in `dev-dependencies`. To define an editable dependency, prefix it with `-e `:

-```bash
-pdm install
+```toml
+[tool.pdm.dev-dependencies]
+editable = [
+    "-e git+https://github.com/pallets/click.git@main#egg=click",  # VCS link
+    "-e ./mypackage/",  # local package
+]
```

-For more CLI usage, please refer to [Manage Dependencies](../usage/dependency.md)
+## Allow prereleases in resolution result
+
+By default, `pdm`'s dependency resolver will ignore prereleases unless there are no stable versions for the given version range of a dependency. This behavior can be changed by setting `allow_prereleases` to `true` in the `[tool.pdm]` table:
+
+```toml
+[tool.pdm]
+allow_prereleases = true
+```

## Specify other sources for finding packages

@@ -36,9 +47,11 @@ verify_ssl = true
name = "internal"
```

-This works as if `--extra-index-url https://private-site.org/pypi/simple` is passed.
+With this, the PyPI index and the above internal source will be searched for packages. It basically does the same as passing `--extra-index-url https://private-site.org/pypi/simple` to the `pip install` command.

-Or you can override the `pypi.url` value by using a source named `pypi`:
+### Disable the PyPI repository
+
+If you want to omit the default PyPI index, just set the source name to `pypi` and that source will **replace** it.

```toml
[[tool.pdm.source]]
@@ -47,7 +60,9 @@ verify_ssl = true
name = "pypi"
```

-By default, or sources are [PEP 503](https://www.python.org/dev/peps/pep-0503/) style "index urls" like pip's `--index-url` and `--extra-url`, however, you can also specify "find links" with
+### Find links source
+
+By default, all sources are [PEP 503](https://www.python.org/dev/peps/pep-0503/) style "indexes" like pip's `--index-url` and `--extra-index-url`; however, you can also specify "find links" with
`type = "find_links"`.
See [this answer](https://stackoverflow.com/a/46651848) for the difference between the two types.

For example, to install from a local directory containing package files:

write it in the `[[tool.pdm.source]]`. Otherwise if you would like to change the index temporarily
on the current platform (for network reasons), you should use `pdm config pypi.url https://private.pypi.org/simple`.

-## Include and exclude package files
-
-The way of specifying include and exclude files are simple, they are given as a list of glob patterns:
-
-```toml
-includes = [
-    "**/*.json",
-    "mypackage/",
-]
-excludes = [
-    "mypackage/_temp/*"
-]
-```
-
-In case you want some files to be included in sdist only, you use the `source-includes` field:
-
-```toml
-includes = [...]
-excludes = [...]
-source-includes = ["tests/"]
-```
-
-Note that the files defined in `source-includes` will be **excluded** automatically from non-sdist builds.
-
-### Default values for includes and excludes
-
-If you don't specify any of these fields, PDM also provides smart default values to fit the most common workflows.
-
-- Top-level packages will be included.
-- `tests` package will be excluded from **non-sdist** builds.
-- `src` directory will be detected as the `package-dir` if it exists.
-
-If your project follows the above conventions you don't need to config any of these fields and it just works.
-Be aware PDM won't add [PEP 420 implicit namespace packages](https://www.python.org/dev/peps/pep-0420/) automatically and they should always be specified in `includes` explicitly.
-
-## Select another package directory to look for packages
-
-Similar to `setuptools`' `package_dir` setting, one can specify another package directory, such as `src`, in `pyproject.toml` easily:
-
-```toml
-package-dir = "src"
-```
-
-If no package directory is given, PDM can also recognize `src` as the `package-dir` implicitly if:
-
-1. `src/__init__.py` doesn't exist, meaning it is not a valid Python package, and
-2. There exist some packages under `src/*`.
-
-## Implicit namespace packages
-
-As specified in [PEP 420](https://www.python.org/dev/peps/pep-0420), a directory will be recognized as a namespace package if:
-
-1. `<package>/__init__.py` doesn't exist, and
-2. There exist normal packages and/or other namespace packages under `<package>/*`, and
-3. `<package>` is explicitly listed in `includes`
-
-## Build Platform-specific Wheels
-
-You may want to build platform-specific wheels if it contains binaries. Currently, building C extensions still relies on `setuptools`.
-You should write a python script with a function named `build` which accepts the ``kwargs`` of `setup()` as the argument.
-Then, update the dictionary with your `ext_modules` settings in the function.
-
-Here is an example taken from `MarkupSafe`:
-
-```python
-# build.py
-from setuptools import Extension
-
-ext_modules = [Extension("markupsafe._speedups", ["src/markupsafe/_speedups.c"])]
-
-def build(setup_kwargs):
-    setup_kwargs.update(ext_modules=ext_modules)
-```
-
-Now, specify the build script path via `build` in the `pyproject.toml`:
-
-```toml
-# pyproject.toml
-[tool.pdm]
-build = "build.py"
-```
-
-If you run `pdm build`(or any other build frontends such as [build](https://pypi.org/project/build)), PDM will build a platform-specific wheel file as well as a sdist.
-By default, every build is performed in a clean and isolated environment, only build requirements can be seen.
If your build has optional requirements that depend on the project environment, you can turn off the environment isolation by `pdm build --no-isolation` or setting config `build_isolation` to falsey value.

+### Respect the order of the sources

-### Override the "Is-Purelib" value
+By default, all sources are considered equal; packages from them are sorted by version and wheel tags, and the best match with the highest version is selected.

-Sometimes you may want to build platform-specific wheels but don't have a build script(the binaries may be built or fetched by other tools). In this case
-you can set the `is-purelib` value in the `pyproject.toml` to `false`:
+In some cases you may want to return packages from the preferred source, and search the others only if they are missing from the preferred source. PDM supports this by reading the configuration `respect-source-order`:

```toml
-[tool.pdm]
-is-purelib = false
+[tool.pdm.resolution]
+respect-source-order = true
```
-
-## Editable build backend
-
-PDM leverages [PEP 660](https://www.python.org/dev/peps/pep-0660/) to build wheels for editable installation.
-One can choose how to generate the wheel out of the two methods:
-
-- `path`: (Default)The legacy method used by setuptools that create .pth files under the packages path.
-- `editables`: Create proxy modules under the packages path. Since the proxy module is looked for at runtime, it may not work with some static analysis tools.
-
-Read the PEP for the difference of the two methods and how they work.
-
-Specify the method in pyproject.toml like below:
-
-```toml
-[tool.pdm]
-editable-backend = "path"
-```
-
-`editables` backend is more recommended but there is a known limitation that it can't work with PEP 420 namespace packages.
-So you would need to change to `path` in that case.
-
-!!! note "About Python 2 compatibility"
-    Due to the fact that the build backend for PDM managed projects requires Python>=3.6, you would not be able to
-    install the current project if Python 2 is being used as the host interpreter. You can still install other dependencies not PDM-backed.

diff --git a/docs/docs/usage/advanced.md b/docs/docs/usage/advanced.md
index 67f261083e..272fd2f2a4 100644
--- a/docs/docs/usage/advanced.md
+++ b/docs/docs/usage/advanced.md
@@ -28,10 +28,10 @@ commands =
```

To use the virtualenv created by Tox, you should make sure you have set `pdm config use_venv true`. PDM then will install
-dependencies from `pdm.lock` into the virtualenv. In the dedicated venv you can directly run tools by `pytest tests/` instead
+dependencies from the lockfile created by [`pdm lock`](cli_reference.md#exec-0--lock) into the virtualenv. In the dedicated venv you can directly run tools by `pytest tests/` instead
of `pdm run pytest tests/`.

-You should also make sure you don't run `pdm add/pdm remove/pdm update/pdm lock` in the test commands, otherwise the `pdm.lock`
+You should also make sure you don't run `pdm add/pdm remove/pdm update/pdm lock` in the test commands, otherwise the `pdm.lock`
file will be modified unexpectedly. Additional dependencies can be supplied with the `deps` config. Besides, `isolated_build` and `passenv`
config should be set as the above example to make PDM work properly.

@@ -94,7 +94,7 @@ Before running nox, you should also `pdm config use_venv true` to enable venv re
### About PEP 582 `__pypackages__` directory

-By default, if you run tools by `pdm run`, `__pypackages__` will be seen by the program and all subprocesses created by it.
This means virtual environments created by those tools are also aware of the packages inside `__pypackages__`, which result in unexpected behavior in some cases.
+By default, if you run tools by [`pdm run`](cli_reference.md#exec-0--run), `__pypackages__` will be seen by the program and all subprocesses created by it. This means virtual environments created by those tools are also aware of the packages inside `__pypackages__`, which results in unexpected behavior in some cases.

For `nox`, you can avoid this by adding a line in `noxfile.py`:

```python
@@ -147,18 +147,6 @@ Testing:
    export HOME=/tmp/home
```

-## Use other PEP 517 backends
-
-PDM supports ALL PEP 517 build backends that comply with PEP 621 specification. At the time of writing, `flit` is going to switch to PEP 621 metadata in the near future, then you can keep `flit` as the build-backend while still managing dependencies using PDM:
-
-```toml
-[build-system]
-requires = ["flit_core >=2,<4"]
-build-backend = "flit_core.buildapi"
-```
-
-PDM will call the correct backend when `pdm build`.
-
## Use PDM in a multi-stage Dockerfile

It is possible to use PDM in a multi-stage Dockerfile to first install the project and dependencies into `__pypackages__`

diff --git a/docs/docs/usage/cli_reference.md b/docs/docs/usage/cli_reference.md
new file mode 100644
index 0000000000..364d891c41
--- /dev/null
+++ b/docs/docs/usage/cli_reference.md
@@ -0,0 +1,52 @@
+# CLI Reference
+
+```python exec="true"
+import argparse
+from pdm.core import Core
+
+parser = Core().parser
+
+
+def render_parser(
+    parser: argparse.ArgumentParser, title: str, heading_level: int = 2
+) -> str:
+    """Render the parser help documents as a string."""
+    result = [f"{'#' * heading_level} {title}\n"]
+    if parser.description and title != "pdm":
+        result.append("> " + parser.description + "\n")
+
+    for group in sorted(
+        parser._action_groups, key=lambda g: g.title.lower(), reverse=True
+    ):
+        if not any(
+            bool(action.option_strings or action.dest)
+            or isinstance(action, argparse._SubParsersAction)
+            for action in group._group_actions
+        ):
+            continue
+
+        result.append(f"{group.title.title()}:\n")
+        for action in group._group_actions:
+            if isinstance(action, argparse._SubParsersAction):
+                for name, subparser in action._name_parser_map.items():
+                    result.append(render_parser(subparser, name, heading_level + 1))
+                continue
+
+            opts = [f"`{opt}`" for opt in action.option_strings]
+            if not opts:
+                line = f"- `{action.dest}`"
+            else:
+                line = f"- {', '.join(opts)}"
+            if action.metavar:
+                line += f" `{action.metavar}`"
+            line += f": {action.help}"
+            if action.default and action.default != argparse.SUPPRESS:
+                line += f" (default: `{action.default}`)"
+            result.append(line)
+        result.append("")
+
+    return "\n".join(result)
+
+
+print(render_parser(parser, "pdm"))
+```

diff --git a/docs/docs/usage/dependency.md b/docs/docs/usage/dependency.md
index 01883eddc4..1821ec9c30 100644
--- a/docs/docs/usage/dependency.md
+++ b/docs/docs/usage/dependency.md
@@ -37,7 +37,7 @@ For details of the meaning of each field in `pyproject.toml`, please refer to [P
pdm add requests
```

-`pdm add` can be followed by one or several dependencies, and the dependency specification is described in
+[`pdm add`](cli_reference.md#exec-0--add) can be followed by one or several dependencies, and the dependency specification is described in
[PEP 508](https://www.python.org/dev/peps/pep-0508/).
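The specification string can carry version constraints and environment markers. For example (the packages and constraints here are only illustrative):

```console
pdm add "requests>=2.25,<3"
pdm add "importlib-metadata; python_version < '3.8'"
```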
PDM also allows extra dependency groups by providing the `-G/--group <name>` option, and those dependencies will go to
@@ -45,7 +45,7 @@ PDM also allows extra dependency groups by providing `-G/--group <name>` option,
After that, dependencies and sub-dependencies will be resolved properly and installed for you, and you can view `pdm.lock` to see the resolved result of all dependencies.

-### Add local dependencies
+### Local dependencies

Local packages can be added with their paths. The path can be a file or a directory:

```
pdm add ./first-1.0.0-py2.py3-none-any.whl
```

The paths MUST start with a `.`, otherwise it will be recognized as a normal named requirement.

-In addition, **a local directory** can also be installed in [editable mode](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs)
-(just like `pip install -e <local project path>` would) using `pdm add -e/--editable <local project path>`.
+
+### VCS dependencies
+
+You can also install from a git repository URL or other version control systems. The following are supported:
+
+- Git: `git`
+- Mercurial: `hg`
+- Subversion: `svn`
+- Bazaar: `bzr`
+
+The URL should be like: `{vcs}+{url}@{rev}`
+
+Examples:
+
+```console
+# Install pip repo on tag `22.0`
+pdm add "git+https://github.com/pypa/pip.git@22.0"
+# Provide credentials in the URL
+pdm add "git+https://username:password@github.com/username/private-repo.git@master"
+# Give a name to the dependency
+pdm add "pip @ git+https://github.com/pypa/pip.git@22.0"
+# Or use the #egg fragment
+pdm add "git+https://github.com/pypa/pip.git@22.0#egg=pip"
+```

### Add development only dependencies

@@ -81,7 +103,20 @@ test = ["pytest"]

For backward-compatibility, if only `-d` or `--dev` is specified, dependencies will go to the `dev` group under `[tool.pdm.dev-dependencies]` by default.

!!! NOTE
-    The same group name MUST NOT appear in both `[tool.pdm.dev-dependencies]` and `[project.optional-dependencies]` .
+    The same group name MUST NOT appear in both `[tool.pdm.dev-dependencies]` and `[project.optional-dependencies]`.
+
+### Editable dependencies
+
+**Local directories** and **VCS dependencies** can be installed in [editable mode](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs). If you are familiar with `pip`, it is just like `pip install -e <local project path>`. **Editable packages are allowed only in development dependencies**:
+
+```console
+# A relative path to the directory
+pdm add -e ./sub-package
+# A file URL to a local directory
+pdm add -e file:///path/to/sub-package
+# A VCS URL
+pdm add -e git+https://github.com/pallets/click.git@main#egg=click
+```

### Save version specifiers

@@ -96,7 +131,7 @@ for the dependency):

### Add prereleases

One can give the `--pre/--prerelease` option to [`pdm add`](cli_reference.md#exec-0--add) so that prereleases are allowed to be pinned for the given packages.

## Update existing dependencies

@@ -147,7 +182,7 @@ which is given by `--update-<strategy>` option:

### Update packages to the versions that break the version specifiers

One can give `-u/--unconstrained` to tell PDM to ignore the version specifiers in the `pyproject.toml`.
This works similarly to the `yarn upgrade -L/--latest` command. Besides, [`pdm update`](cli_reference.md#exec-0--update) also supports the
`--pre/--prerelease` option.
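These flags can be combined. For instance, a hypothetical run that bumps every dependency to its latest version (prereleases included), ignoring the specifiers in `pyproject.toml`:

```console
pdm update --unconstrained --prerelease
```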
## Remove existing dependencies

pdm remove -dG test pytest-cov
```

There are two similar commands to do this job with a slight difference:

-- `pdm install` will check the lock file and relock if it mismatches with project file, then install.
-- `pdm sync` installs dependencies in the lock file and will error out if it doesn't exist.
-  Besides, `pdm sync` can also remove unneeded packages if `--clean` option is given.
+- [`pdm install`](cli_reference.md#exec-0--install) will check the lock file and relock if it mismatches with the project file, then install.
+- [`pdm sync`](cli_reference.md#exec-0--sync) installs dependencies in the lock file and will error out if it doesn't exist. Besides, [`pdm sync`](cli_reference.md#exec-0--sync) can also remove unneeded packages if the `--clean` option is given.

## Specify the lockfile to use

You can specify another lockfile than the default `pdm.lock` by using the `-L/--lockfile <path>` option or the `PDM_LOCKFILE` environment variable.

### Select a subset of dependencies with CLI options

@@ -252,6 +286,21 @@ Include the following setting in `pyproject.toml` to enable:
allow_prereleases = true
```

+## Set acceptable format for locking or installing
+
+If you want to control the format (binary/sdist) of the packages, you can set the env vars `PDM_NO_BINARY` and `PDM_ONLY_BINARY`.
+
+Each env var is a comma-separated list of package names. You can set it to `:all:` to apply to all packages. For example:
+
+```
+# No binary for werkzeug will be locked nor used for installation
+PDM_NO_BINARY=werkzeug pdm add flask
+# Only binaries will be locked in the lock file
+PDM_ONLY_BINARY=:all: pdm lock
+# No binaries will be used for installation
+PDM_NO_BINARY=:all: pdm install
+```
+
## Solve the locking failure

If PDM is not able to find a resolution to satisfy the requirements, it will raise an error. For example,

diff --git a/docs/docs/usage/hooks.md b/docs/docs/usage/hooks.md
new file mode 100644
index 0000000000..c51a0a697b
--- /dev/null
+++ b/docs/docs/usage/hooks.md
@@ -0,0 +1,262 @@
+# Lifecycle and hooks
+
+As any Python deliverable, your project will go through the different phases
+of a Python project lifecycle, and PDM provides commands to perform the expected tasks for those phases.
+
+It also provides hooks attached to these steps, allowing for:
+
+- plugins to listen to the [signals][pdm.signals] of the same name.
+- developers to define custom scripts with the same name.
+
+The built-in commands are currently split into 3 groups:
+
+- the [initialization phase](#initialization)
+- the [dependencies management](#dependencies-management)
+- the [publication phase](#publication)
+
+You will most probably need to perform some recurrent tasks between the installation and publication phases (housekeeping, linting, testing, ...);
+this is why PDM lets you define your own tasks/phases using [user scripts](#user-scripts).
+
+To provide full flexibility, PDM allows you to [skip some hooks and tasks](#skipping) on demand.
+
+## Initialization
+
+The initialization phase should occur only once in a project's lifetime, by running the [`pdm init`](cli_reference.md#exec-0--init)
+command to initialize an existing project (it will prompt you to fill in the `pyproject.toml` file).
+
+They trigger the following hooks:
+
+- [`post_init`][pdm.signals.post_init]
+
+```mermaid
+flowchart LR
+  subgraph pdm-init [pdm init]
+    direction LR
+    post-init{{Emit post_init}}
+    init --> post-init
+  end
+```
+
+## Dependencies management
+
+Dependency management is required for the developer to be able to work, and it covers the following tasks:
+
+- `lock`: compute a lock file from the `pyproject.toml` requirements.
+- `sync`: synchronize (add/remove/update) PEP 582 packages from the lock file and install the current project as editable.
+- `add`: add a dependency
+- `remove`: remove a dependency
+
+All those steps are directly available with the following commands:
+
+- [`pdm lock`](cli_reference.md#exec-0--lock): execute the `lock` task
+- [`pdm sync`](cli_reference.md#exec-0--sync): execute the `sync` task
+- [`pdm install`](cli_reference.md#exec-0--install): execute the `sync` task, preceded by `lock` if required
+- [`pdm add`](cli_reference.md#exec-0--add): add a dependency requirement, re-lock and then sync
+- [`pdm remove`](cli_reference.md#exec-0--remove): remove a dependency requirement, re-lock and then sync
+- [`pdm update`](cli_reference.md#exec-0--update): re-lock dependencies to their latest versions and then sync
+
+They trigger the following hooks:
+
+- [`pre_install`][pdm.signals.pre_install]
+- [`post_install`][pdm.signals.post_install]
+- [`pre_lock`][pdm.signals.pre_lock]
+- [`post_lock`][pdm.signals.post_lock]
+
+```mermaid
+flowchart LR
+  subgraph pdm-install [pdm install]
+    direction LR
+
+    subgraph pdm-lock [pdm lock]
+      direction TB
+      pre-lock{{Emit pre_lock}}
+      post-lock{{Emit post_lock}}
+      pre-lock --> lock --> post-lock
+    end
+
+    subgraph pdm-sync [pdm sync]
+      direction TB
+      pre-install{{Emit pre_install}}
+      post-install{{Emit post_install}}
+      pre-install --> sync --> post-install
+    end
+
+    pdm-lock --> pdm-sync
+  end
+```
+
+### Switching Python version
+
+This is a special case in dependency management:
+you can switch the current Python version using [`pdm use`](cli_reference.md#exec-0--use)
+and it will emit the [`post_use`][pdm.signals.post_use] signal with the new Python interpreter.
+
+```mermaid
+flowchart LR
+  subgraph pdm-use [pdm use]
+    direction LR
+    post-use{{Emit post_use}}
+    use --> post-use
+  end
+```
+
+## Publication
+
+As soon as you are ready to publish your package/app/library, you will require the publication tasks:
+
+- `build`: build/compile assets requiring it and package everything into a Python package (sdist, wheel)
+- `upload`: upload/publish the package to a remote PyPI index
+
+All those steps are available with the following commands:
+
+- [`pdm build`](cli_reference.md#exec-0--build)
+- [`pdm publish`](cli_reference.md#exec-0--publish)
+
+They trigger the following hooks:
+
+- [`pre_publish`][pdm.signals.pre_publish]
+- [`post_publish`][pdm.signals.post_publish]
+- [`pre_build`][pdm.signals.pre_build]
+- [`post_build`][pdm.signals.post_build]
+
+
+```mermaid
+flowchart LR
+  subgraph pdm-publish [pdm publish]
+    direction LR
+    pre-publish{{Emit pre_publish}}
+    post-publish{{Emit post_publish}}
+
+    subgraph pdm-build [pdm build]
+      pre-build{{Emit pre_build}}
+      post-build{{Emit post_build}}
+      pre-build --> build --> post-build
+    end
+
+    %% subgraph pdm-upload [pdm upload]
+    %%   pre-upload{{Emit pre_upload}}
+    %%   post-upload{{Emit post_upload}}
+    %%   pre-upload --> upload --> post-upload
+    %% end
+
+    pre-publish --> pdm-build --> upload --> post-publish
+  end
+```
+
+Execution will stop at the first failure, hooks included.
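+As a sketch of how a plugin might subscribe to these signals (the receiver signature below is an assumption; check the [signals][pdm.signals] API reference for the exact arguments each signal sends):
+
+```python
+# Hypothetical plugin snippet: log around the publication phase.
+from pdm.signals import pre_publish, post_publish
+
+def on_pre_publish(project, **kwargs):
+    # The sender is the current project; extra keyword arguments vary by signal.
+    print(f"About to publish {project.root.name}")
+
+def on_post_publish(project, **kwargs):
+    print("Publication finished")
+
+pre_publish.connect(on_pre_publish)
+post_publish.connect(on_post_publish)
+```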
+
+## User scripts
+
+[User scripts are detailed in their own section](scripts.md), but you should know that:
+
+- each user script can define a `pre_*` and `post_*` script, including composite scripts.
+- each `run` execution will trigger the [`pre_run`][pdm.signals.pre_run] and [`post_run`][pdm.signals.post_run] hooks
+- each script execution will trigger the [`pre_script`][pdm.signals.pre_script] and [`post_script`][pdm.signals.post_script] hooks
+
+Given the following `scripts` definition:
+
+```toml
+[tool.pdm.scripts]
+pre_script = ""
+post_script = ""
+pre_test = ""
+post_test = ""
+test = ""
+pre_composite = ""
+post_composite = ""
+composite = {composite = ["test"]}
+```
+
+`pdm run test` will have the following lifecycle:
+
+```mermaid
+flowchart LR
+  subgraph pdm-run-test [pdm run test]
+    direction LR
+    pre-run{{Emit pre_run}}
+    post-run{{Emit post_run}}
+    subgraph run-test [test task]
+      direction TB
+      pre-script{{Emit pre_script}}
+      post-script{{Emit post_script}}
+      pre-test[Execute pre_test]
+      post-test[Execute post_test]
+      test[Execute test]
+
+      pre-script --> pre-test --> test --> post-test --> post-script
+    end
+
+    pre-run --> run-test --> post-run
+  end
+```
+
+while `pdm run composite` will have the following:
+
+```mermaid
+flowchart LR
+  subgraph pdm-run-composite [pdm run composite]
+    direction LR
+    pre-run{{Emit pre_run}}
+    post-run{{Emit post_run}}
+
+    subgraph run-composite [composite task]
+      direction TB
+      pre-script-composite{{Emit pre_script}}
+      post-script-composite{{Emit post_script}}
+      pre-composite[Execute pre_composite]
+      post-composite[Execute post_composite]
+
+      subgraph run-test [test task]
+        direction TB
+        pre-script-test{{Emit pre_script}}
+        post-script-test{{Emit post_script}}
+        pre-test[Execute pre_test]
+        post-test[Execute post_test]
+
+        pre-script-test --> pre-test --> test --> post-test --> post-script-test
+      end
+
+      pre-script-composite --> pre-composite --> run-test --> post-composite --> post-script-composite
+    end
+
+    pre-run --> run-composite --> post-run
+  end
+```
+
+## Skipping
+
+It is possible to control which tasks and hooks run for any built-in command, as well as for custom user scripts,
+using the `--skip` option.
+
+It accepts a comma-separated list of hook/task names to skip,
+as well as the predefined `:all`, `:pre` and `:post` shortcuts,
+which respectively skip all hooks, all `pre_*` hooks and all `post_*` hooks.
+You can also provide the skip list in the `PDM_SKIP_HOOKS` environment variable,
+but it will be overridden as soon as the `--skip` parameter is provided.
+
+Given the previous script block, running `pdm run --skip=:pre,post_test composite` will result in the following reduced lifecycle:
+
+```mermaid
+flowchart LR
+  subgraph pdm-run-composite [pdm run composite]
+    direction LR
+    post-run{{Emit post_run}}
+
+    subgraph run-composite [composite task]
+      direction TB
+      post-script-composite{{Emit post_script}}
+      post-composite[Execute post_composite]
+
+      subgraph run-test [test task]
+        direction TB
+        post-script-test{{Emit post_script}}
+
+        test --> post-script-test
+      end
+
+      run-test --> post-composite --> post-script-composite
+    end
+
+    run-composite --> post-run
+  end
+```

diff --git a/docs/docs/usage/project.md b/docs/docs/usage/project.md
index 084a43bcf7..40bc64b545 100644
--- a/docs/docs/usage/project.md
+++ b/docs/docs/usage/project.md
@@ -1,19 +1,8 @@
# Manage Project

-PDM can act as a PEP 517 build backend, to enable that, write the following lines in your
-`pyproject.toml`.
If you used `pdm init` to create it for you, it should be done already.
-
-```toml
-[build-system]
-requires = ["pdm-pep517"]
-build-backend = "pdm.pep517.api"
-```
-
-`pip` will read the backend settings to install or build a package.
-
## Choose a Python interpreter

-If you have used `pdm init`, you must have already seen how PDM detects and selects the Python
+If you have used [`pdm init`](cli_reference.md#exec-0--init), you must have already seen how PDM detects and selects the Python
interpreter. After initialization, you can also change the settings by `pdm use <python_version_or_path>`.
The argument can be either a version specifier of any length, or a relative or absolute path to the
python interpreter, but remember the Python interpreter must conform with the `python_requires`

@@ -116,12 +105,68 @@ Any local configurations will be stored in `.pdm.toml` under the project root di

The configuration files are searched in the following order:

1. `<PROJECT_ROOT>/.pdm.toml` - The project configuration
-2. `~/.pdm/config.toml` - The home configuration
+2. `<CONFIG_ROOT>/config.toml` - The home configuration

-If `-g/--global` option is used, the first item will be replaced by `~/.pdm/global-project/.pdm.toml`.
+where `<CONFIG_ROOT>` is:
+
+- `$XDG_CONFIG_HOME/pdm` (`~/.config/pdm` in most cases) on Linux as defined by the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
+- `~/Library/Preferences/pdm` on macOS as defined by [Apple File System Basics](https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html)
+- `%USERPROFILE%\AppData\Local\pdm` on Windows as defined in [Known folders](https://docs.microsoft.com/en-us/windows/win32/shell/known-folders)
+
+If the `-g/--global` option is used, the first item will be replaced by `<CONFIG_ROOT>/global-project/.pdm.toml`.

You can find all available configuration items in [Configuration Page](../configuration.md).

+## Publish the project to PyPI
+
+With PDM, you can build and then upload your project to PyPI in one step.
+
+```bash
+pdm publish
+```
+
+You can specify which repository you would like to publish to:
+
+```bash
+pdm publish -r pypi
+```
+
+PDM will look for the repository named `pypi` from the configuration and use the URL for upload.
+You can also give the URL directly with the `-r/--repository` option:
+
+```bash
+pdm publish -r https://test.pypi.org/legacy/
+```
+
+See all supported options by typing `pdm publish --help`.
+
+### Configure the repository secrets for upload
+
+When using the [`pdm publish`](cli_reference.md#exec-0--publish) command, it reads the repository secrets from the *global* config file (`<CONFIG_ROOT>/config.toml`). The content of the config is as follows:
+
+```toml
+[repository.pypi]
+username = "frostming"
+password = "<secret>"
+
+[repository.company]
+url = "https://pypi.company.org/legacy/"
+username = "frostming"
+password = "<secret>"
+```
+
+!!! NOTE
+    You don't need to configure the `url` for the `pypi` and `testpypi` repositories; they are filled with default values.
+
+To change the repository config from the command line, use the [`pdm config`](cli_reference.md#exec-0--config) command:
+
+```bash
+pdm config repository.pypi.username "__token__"
+pdm config repository.pypi.password "my-pypi-token"
+
+pdm config repository.company.url "https://pypi.company.org/legacy/"
+```
+
## Cache the installation of wheels

If a package is required by many projects on the system, each project has to keep its own copy.
This may become a waste of disk space, especially for data science and machine learning libraries.

@@ -144,7 +189,7 @@ The caches are located under `$(pdm config cache_dir)/packages`. One can view th

Sometimes users may want to keep track of the dependencies of the global Python interpreter as well.
It is easy to do so with PDM, via the `-g/--global` option which is supported by most subcommands.

If the option is passed, `<CONFIG_ROOT>/global-project` will be used as the project directory, which is
almost the same as a normal project except that `pyproject.toml` will be created automatically for you
and it doesn't support build features. The idea is taken from Haskell's [stack](https://docs.haskellstack.org).

However, unlike `stack`, by default, PDM won't use the global project automatically if a local project is not found.
Users should pass `-g/--global` explicitly to activate it, since it is not very pleasing if packages go to a wrong place.
But PDM also leaves the decision to users; just set the config `global_project.fallback` to `true`.

If you want the global project to track another project file other than `<CONFIG_ROOT>/global-project`, you can provide the
project path via the `-p/--project <path>` option.

!!! attention "CAUTION"

@@ -179,14 +224,17 @@ PDM provides `import` command so that you don't have to initialize the project m

1. Pipenv's `Pipfile`
2. Poetry's section in `pyproject.toml`
3. Flit's section in `pyproject.toml`
-4. `requirements.txt` format used by Pip
+4. `requirements.txt` format used by pip
+5. setuptools `setup.py`

-Also, when you are executing `pdm init` or `pdm install`, PDM can auto-detect possible files to import
-if your PDM project has not been initialized yet.
+Also, when you are executing [`pdm init`](cli_reference.md#exec-0--init) or [`pdm install`](cli_reference.md#exec-0--install), PDM can auto-detect possible files to import if your PDM project has not been initialized yet.
+
+!!! attention "CAUTION"
+    Converting a `setup.py` will execute the file with the project interpreter. Make sure `setuptools` is installed with the interpreter and the `setup.py` is trusted.

## Export locked packages to alternative formats

-You can also export `pdm.lock` to other formats, to ease the CI flow or image building process. Currently,
+You can also export the lockfile generated by [`pdm lock`](cli_reference.md#exec-0--lock) to other formats, to ease the CI flow or image building process. Currently,
only `requirements.txt` and `setup.py` formats are supported:

```console

diff --git a/docs/docs/usage/scripts.md b/docs/docs/usage/scripts.md
index 8a092f6bf0..e2c8691730 100644
--- a/docs/docs/usage/scripts.md
+++ b/docs/docs/usage/scripts.md
@@ -10,7 +10,14 @@ pdm run flask run -p 54321

It will run `flask run -p 54321` in the environment that is aware of packages in the `__pypackages__/` folder.

-## `[tool.pdm.scripts]` Table
+!!! note
+    There is a builtin shortcut making all scripts available as root commands
+    as long as the script does not conflict with any builtin or plugin-contributed command.
+    In other words, if you have a `test` script, you can run both `pdm run test` and `pdm test`.
+    But if you have an `install` script, only `pdm run install` will run it;
+    `pdm install` will still run the builtin `install` command.
+
+## User Scripts

PDM also supports custom script shortcuts in the optional `[tool.pdm.scripts]` section of `pyproject.toml`.
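For instance, a minimal table (mirroring the `start` example used below) might look like:

```toml
[tool.pdm.scripts]
start = "flask run -p 54321"
```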
@@ -28,14 +35,16 @@ $ pdm run start
Flask server started at http://127.0.0.1:54321
```

-Any extra arguments will be appended to the command:
+Any following arguments will be appended to the command:

```bash
$ pdm run start -h 0.0.0.0
Flask server started at http://0.0.0.0:54321
```

-PDM supports 3 types of scripts:
+---
+
+PDM supports 4 types of scripts:

### `cmd`

@@ -85,6 +94,34 @@ The function can be supplied with literal arguments:
foobar = {call = "foo_package.bar_module:main('dev')"}
```

+### `composite`
+
+This script kind executes other defined scripts:
+
+```toml
+[tool.pdm.scripts]
+lint = "flake8"
+test = "pytest"
+all = {composite = ["lint", "test"]}
+```
+
+Running `pdm run all` will run `lint` first and then `test` if `lint` succeeded.
+
+You can also provide arguments to the called scripts:
+
+```toml
+[tool.pdm.scripts]
+lint = "flake8"
+test = "pytest"
+all = {composite = ["lint mypackage/", "test -v tests/"]}
+```
+
+!!! note
+    Arguments passed on the command line are given to each called task.
+
+
+## Script Options
+
### `env`

All environment variables set in the current shell can be seen by `pdm run` and will be expanded when executed.

@@ -98,6 +135,9 @@ start.env = {FOO = "bar", FLASK_ENV = "development"}

Note how we use [TOML's syntax](https://github.com/toml-lang/toml) to define a composite dictionary.

+!!! note
+    Environment variables specified at the composite task level will override those defined by called tasks.
+
### `env_file`

You can also store all environment variables in a dotenv file and let PDM read it:

@@ -108,6 +148,9 @@ start.cmd = "flask run -p 54321"
start.env_file = ".env"
```

+!!! note
+    A dotenv file specified at the composite task level will override those defined by called tasks.
+
### `site_packages`

To make sure the running environment is properly isolated from the outer Python interpreter,
@@ -119,9 +162,9 @@ site-packages from the selected interpreter WON'T be loaded into `sys.path`, unl

Note that site-packages will always be loaded if running with PEP 582 enabled (without the `pdm run` prefix).

-### Shared Settings
+### Shared Options

-If you want the settings to be shared by all tasks run by `pdm run`,
+If you want the options to be shared by all tasks run by `pdm run`,
you can write them under a special key `_` in the `[tool.pdm.scripts]` table:

```toml
@@ -139,11 +182,13 @@ Use `pdm run --list/-l` to show the list of available script shortcuts:

```bash
$ pdm run --list
-Name        Type  Script           Description
------------ ----- ---------------- ----------------------
-test_cmd    cmd   flask db upgrade
-test_script call  test_script:main call a python function
-test_shell  shell echo $FOO        shell command
+╭─────────────┬───────┬───────────────────────────╮
+│ Name        │ Type  │ Description               │
+├─────────────┼───────┼───────────────────────────┤
+│ test_cmd    │ cmd   │ flask db upgrade          │
+│ test_script │ call  │ call a python function    │
+│ test_shell  │ shell │ shell command             │
+╰─────────────┴───────┴───────────────────────────╯
```

You can add a `help` option with the description of the script, and it will be displayed in the `Description` column in the above output.
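For example (the command and description here are only illustrative):

```toml
[tool.pdm.scripts]
test = {cmd = "pytest tests/", help = "Run the test suite"}
```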
@@ -175,6 +220,12 @@ Under certain situations PDM will look for some special hook scripts for executi
- `post_lock`: Run after dependency resolution
- `pre_build`: Run before building distributions
- `post_build`: Run after distributions are built
+- `pre_publish`: Run before publishing distributions
+- `post_publish`: Run after distributions are published
+- `pre_script`: Run before any script
+- `post_script`: Run after any script
+- `pre_run`: Run once before run script invocation
+- `post_run`: Run once after run script invocation

!!! note
    Pre & post scripts can't receive any arguments.

@@ -183,3 +234,30 @@ Under certain situations PDM will look for some special hook scripts for executi
    If there exists an `install` script under the `[tool.pdm.scripts]` table,
    `pre_install` scripts can be triggered by both `pdm install` and `pdm run install`. So it is
    recommended to not use the preserved names.
+
+!!! note
+    Composite tasks can also have pre and post scripts.
+    Called tasks will run their own pre and post scripts.
+
+## Skipping scripts
+
+Because sometimes it is desirable to run a script but without its hooks or pre and post scripts,
+there is a `--skip=:all` option which will disable all hooks, pre and post alike.
+There are also `--skip=:pre` and `--skip=:post`, allowing you to respectively
+skip all `pre_*` hooks and all `post_*` hooks.
+
+It is also possible to need a pre script but not the post one,
+or to need all tasks from a composite task except one.
+For those use cases, there is a finer-grained `--skip` parameter
+accepting a list of task or hook names to exclude.
+
+```console
+pdm run --skip pre_task1,task2 my-composite
+```
+
+This command will run the `my-composite` task and skip the `pre_task1` hook as well as `task2` and its hooks.
+
+You can also provide your skip list in the `PDM_SKIP_HOOKS` environment variable,
+but it will be overridden as soon as the `--skip` parameter is provided.
+
+There are more details on hooks and pre/post script behavior on [the dedicated hooks page](hooks.md).

diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 7ccb223044..d029627777 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -28,6 +28,7 @@ theme:
plugins:
  - search
  - mike
+  - markdown-exec
  - mkdocstrings:
      handlers:
        python:
@@ -40,10 +41,13 @@ nav:
      - usage/dependency.md
      - usage/project.md
      - usage/scripts.md
+      - usage/hooks.md
      - usage/advanced.md
+      - usage/cli_reference.md
    - Writing pyproject.toml:
      - pyproject/pep621.md
      - pyproject/tool-pdm.md
+      - pyproject/build.md
    - Configuration: configuration.md
    - Plugins:
      - plugin/write.md
@@ -56,7 +60,6 @@ nav:
markdown_extensions:
  - pymdownx.highlight:
      linenums: true
-  - pymdownx.superfences
  - pymdownx.tabbed:
      alternate_style: true
  - pymdownx.details
@@ -66,6 +69,11 @@ markdown_extensions:
      permalink: "#"
  - markdown_include.include:
      base_path: docs
+  - pymdownx.superfences:
+      custom_fences:
+        - name: mermaid
+          class: mermaid
+          format: !!python/name:pymdownx.superfences.fence_code_format

copyright: Copyright © 2019-2021 Frost Ming

diff --git a/news/1062.feature.md b/news/1062.feature.md
new file mode 100644
index 0000000000..48fdfef017
--- /dev/null
+++ b/news/1062.feature.md
@@ -0,0 +1 @@
+Add support for importing from a `setup.py` project.
diff --git a/news/1072.dep.md b/news/1072.dep.md
new file mode 100644
index 0000000000..0d050b61e6
--- /dev/null
+++ b/news/1072.dep.md
@@ -0,0 +1 @@
+Prefer `tomllib` on Python 3.11.
diff --git a/news/1083.feature.md b/news/1083.feature.md
new file mode 100644
index 0000000000..73417f3296
--- /dev/null
+++ b/news/1083.feature.md
@@ -0,0 +1 @@
+*BREAKING*: Editable dependencies in the `[project]` table are not allowed, according to PEP 621. They are, however, still allowed in the `[tool.pdm.dev-dependencies]` table. PDM will emit a warning when it finds editable dependencies in the `[project]` table, or will abort when you try to add them into the `[project]` table via the CLI.
diff --git a/news/1091.dep.md b/news/1091.dep.md
new file mode 100644
index 0000000000..dd8896b525
--- /dev/null
+++ b/news/1091.dep.md
@@ -0,0 +1 @@
+Drop the vendored libraries `click`, `halo`, `colorama` and `log_symbols`. PDM no longer has any vendored dependencies.
diff --git a/news/1091.feature.md b/news/1091.feature.md
new file mode 100644
index 0000000000..01fc1c575f
--- /dev/null
+++ b/news/1091.feature.md
@@ -0,0 +1 @@
+Switch the UI backend to `rich`.
diff --git a/news/1091.misc.md b/news/1091.misc.md
new file mode 100644
index 0000000000..7b4534b1bd
--- /dev/null
+++ b/news/1091.misc.md
@@ -0,0 +1 @@
+Extract the compatibility import statements into the `compat` module.
diff --git a/news/1093.bugfix.md b/news/1093.bugfix.md
new file mode 100644
index 0000000000..c9bf16458b
--- /dev/null
+++ b/news/1093.bugfix.md
@@ -0,0 +1 @@
+Fix a bug where candidates with a local part in their version can't be found and installed correctly.
diff --git a/news/1096.feature.md b/news/1096.feature.md
new file mode 100644
index 0000000000..b4be12c157
--- /dev/null
+++ b/news/1096.feature.md
@@ -0,0 +1 @@
+Improve the terminal UI and logging. Disable live progress under verbose mode. The logger levels can be controlled by the `-v` option.
diff --git a/news/1096.refactor.md b/news/1096.refactor.md
new file mode 100644
index 0000000000..e228a9db15
--- /dev/null
+++ b/news/1096.refactor.md
@@ -0,0 +1 @@
+Use `unearth` to replace `pip`'s `PackageFinder` and related data models. PDM no longer relies on `pip` internals, which are unstable across updates.
diff --git a/news/1098.feature.md b/news/1098.feature.md
new file mode 100644
index 0000000000..9831ca1fd9
--- /dev/null
+++ b/news/1098.feature.md
@@ -0,0 +1 @@
+Lazy load the candidates returned by `find_matches()` to speed up the resolution.
diff --git a/news/1107.feature.md b/news/1107.feature.md
new file mode 100644
index 0000000000..3e9b6b6239
--- /dev/null
+++ b/news/1107.feature.md
@@ -0,0 +1 @@
+Add a new `publish` command to PDM, since it is requested by so many people and will make the workflow easier.
diff --git a/news/1117.feature.md b/news/1117.feature.md
new file mode 100644
index 0000000000..6b715dad63
--- /dev/null
+++ b/news/1117.feature.md
@@ -0,0 +1 @@
+Add a `composite` script kind that allows running multiple defined scripts in a single command, as well as reusing scripts while overriding `env` or `env_file`.
diff --git a/news/1127.feature.md b/news/1127.feature.md
new file mode 100644
index 0000000000..8c44ffb477
--- /dev/null
+++ b/news/1127.feature.md
@@ -0,0 +1 @@
+Add a new execution option `--skip` to opt out of some scripts and hooks in any execution (both scripts and PDM commands).
diff --git a/news/1147.feature.md b/news/1147.feature.md
new file mode 100644
index 0000000000..fc4ed3bb03
--- /dev/null
+++ b/news/1147.feature.md
@@ -0,0 +1 @@
+Add the `pre/post_publish`, `pre/post_run` and `pre/post_script` hooks, as well as extensive lifecycle and hooks documentation.
diff --git a/news/1151.feature.md b/news/1151.feature.md
new file mode 100644
index 0000000000..13af2d79c2
--- /dev/null
+++ b/news/1151.feature.md
@@ -0,0 +1 @@
+Shorter script listings, especially for multiline and composite scripts.
diff --git a/news/1153.dep.md b/news/1153.dep.md
new file mode 100644
index 0000000000..c7a3da9f29
--- /dev/null
+++ b/news/1153.dep.md
@@ -0,0 +1 @@
+Update the dependency `pdm-pep517` to version 1.0.0.
diff --git a/news/1154.feature.md b/news/1154.feature.md
new file mode 100644
index 0000000000..0185867a4a
--- /dev/null
+++ b/news/1154.feature.md
@@ -0,0 +1 @@
+Improve the lock speed by parallelizing the hash fetching.
diff --git a/news/1156.fix.md b/news/1156.fix.md
new file mode 100644
index 0000000000..af63d2122f
--- /dev/null
+++ b/news/1156.fix.md
@@ -0,0 +1 @@
+Try parsing the candidate metadata from `pyproject.toml` before building it.
diff --git a/news/1157.feature.md b/news/1157.feature.md
new file mode 100644
index 0000000000..5786eb123b
--- /dev/null
+++ b/news/1157.feature.md
@@ -0,0 +1 @@
+Update the converters to support the new `[tool.pdm.build]` table.
diff --git a/news/1157.removal.md b/news/1157.removal.md
new file mode 100644
index 0000000000..a1a55a6ffa
--- /dev/null
+++ b/news/1157.removal.md
@@ -0,0 +1 @@
+The PDM legacy metadata format (from `pdm 0.x`) is no longer supported.
diff --git a/news/1159.feature.md b/news/1159.feature.md
new file mode 100644
index 0000000000..9666242b0d
--- /dev/null
+++ b/news/1159.feature.md
@@ -0,0 +1 @@
+Scripts are now available as root commands if they don't conflict with any builtin or plugin-contributed command.
diff --git a/news/1160.misc.md b/news/1160.misc.md
new file mode 100644
index 0000000000..d5da6b0589
--- /dev/null
+++ b/news/1160.misc.md
@@ -0,0 +1 @@
+Provide a `tox.ini` file for easier local testing against all Python versions.
diff --git a/news/1161.fix.md b/news/1161.fix.md
new file mode 100644
index 0000000000..8aa1991392
--- /dev/null
+++ b/news/1161.fix.md
@@ -0,0 +1 @@
+Ensure all app/user related paths are computed according to platform standards.
diff --git a/news/1163.feature.md b/news/1163.feature.md
new file mode 100644
index 0000000000..662d5e1b79
--- /dev/null
+++ b/news/1163.feature.md
@@ -0,0 +1 @@
+Add a `post_use` hook triggered after successfully switching the Python version.
diff --git a/news/593.feature.md b/news/593.feature.md
new file mode 100644
index 0000000000..6dc9d163d7
--- /dev/null
+++ b/news/593.feature.md
@@ -0,0 +1 @@
+Add a configuration item to respect the source order in the `pyproject.toml` file. Packages will be returned from sources earlier in the order, falling back to later ones if not found.
diff --git a/pdm.lock b/pdm.lock index b3897feecc..6b9909174c 100644 --- a/pdm.lock +++ b/pdm.lock @@ -3,15 +3,6 @@ name = "arpeggio" version = "1.10.2" summary = "Packrat parser interpreter" -[[package]] -name = "astunparse" -version = "1.6.3" -summary = "An AST unparser for Python" -dependencies = [ - "six<2.0,>=1.6.1", - "wheel<1.0,>=0.23.0", -] - [[package]] name = "atomicwrites" version = "1.4.0" @@ -29,6 +20,27 @@ name = "blinker" version = "1.4" summary = "Fast, simple object-to-object and broadcast signaling" +[[package]] +name = "cachecontrol" +version = "0.12.11" +requires_python = ">=3.6" +summary = "httplib2 caching for requests" +dependencies = [ + "msgpack>=0.5.2", + "requests", +] + +[[package]] +name = "cachecontrol" +version = "0.12.11" +extras = ["filecache"] +requires_python = ">=3.6" +summary = "httplib2 caching for requests" +dependencies = [ + "cachecontrol>=0.12.11", + "lockfile>=0.9", +] + [[package]] name = "cached-property" version = "1.5.2" @@ -36,7 +48,8 @@ summary = "A decorator for caching properties in classes." [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.5.18.1" +requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." [[package]] @@ -47,7 +60,7 @@ summary = "The Real First Universal Charset Detector. Open, modern and actively [[package]] name = "click" -version = "8.1.2" +version = "8.1.3" requires_python = ">=3.7" summary = "Composable command line interface toolkit" dependencies = [ @@ -88,15 +101,26 @@ requires_python = ">=3.7" summary = "Code coverage measurement for Python" dependencies = [ "coverage>=5.2.1", - "tomli; python_version < \"3.11\"", + "tomli", ] +[[package]] +name = "distlib" +version = "0.3.4" +summary = "Distribution utilities" + [[package]] name = "execnet" version = "1.9.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "execnet: rapid multi-Python deployment" +[[package]] +name = "filelock" +version = "3.7.1" +requires_python = ">=3.7" +summary = "A platform independent file lock." + [[package]] name = "findpython" version = "0.1.6" @@ -108,7 +132,7 @@ dependencies = [ [[package]] name = "ghp-import" -version = "2.0.2" +version = "2.1.0" summary = "Copy your docs directly to the gh-pages branch." dependencies = [ "python-dateutil>=2.8.1", @@ -116,7 +140,7 @@ dependencies = [ [[package]] name = "griffe" -version = "0.17.0" +version = "0.18.0" requires_python = ">=3.7" summary = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." dependencies = [ @@ -139,15 +163,6 @@ dependencies = [ "zipp>=0.5", ] -[[package]] -name = "importlib-resources" -version = "5.7.1" -requires_python = ">=3.7" -summary = "Read resources from Python packages" -dependencies = [ - "zipp>=3.1.0; python_version < \"3.10\"", -] - [[package]] name = "incremental" version = "21.3.0" @@ -166,7 +181,7 @@ summary = "A library for installing Python wheels." [[package]] name = "jinja2" -version = "3.1.1" +version = "3.1.2" requires_python = ">=3.7" summary = "A very fast and expressive template engine." 
dependencies = [ @@ -174,25 +189,28 @@ dependencies = [ ] [[package]] -name = "jsonschema" -version = "4.4.0" -requires_python = ">=3.7" -summary = "An implementation of JSON Schema validation for Python" -dependencies = [ - "attrs>=17.4.0", - "importlib-resources>=1.4.0; python_version < \"3.9\"", - "pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0", -] +name = "lockfile" +version = "0.12.2" +summary = "Platform-independent file locking module" [[package]] name = "markdown" -version = "3.3.6" +version = "3.3.7" requires_python = ">=3.6" summary = "Python implementation of Markdown." dependencies = [ "importlib-metadata>=4.4; python_version < \"3.10\"", ] +[[package]] +name = "markdown-exec" +version = "0.7.0" +requires_python = ">=3.7" +summary = "Utilities to execute code blocks in Markdown files." +dependencies = [ + "pymdown-extensions>=9", +] + [[package]] name = "markdown-include" version = "0.6.0" @@ -254,7 +272,7 @@ dependencies = [ [[package]] name = "mkdocs-material" -version = "8.2.9" +version = "8.2.13" requires_python = ">=3.7" summary = "Documentation that simply works" dependencies = [ @@ -274,7 +292,7 @@ summary = "Extension pack for Python Markdown." [[package]] name = "mkdocstrings" -version = "0.18.1" +version = "0.19.0" requires_python = ">=3.7" summary = "Automatic documentation from sources, for MkDocs." dependencies = [ @@ -283,7 +301,6 @@ dependencies = [ "MarkupSafe>=1.1", "mkdocs-autorefs>=0.3.1", "mkdocs>=1.2", - "mkdocstrings-python-legacy>=0.2", "pymdown-extensions>=6.3", ] @@ -294,17 +311,7 @@ requires_python = ">=3.7" summary = "A Python handler for mkdocstrings." dependencies = [ "griffe>=0.11.1", - "mkdocstrings>=0.18", -] - -[[package]] -name = "mkdocstrings-python-legacy" -version = "0.2.2" -requires_python = ">=3.7" -summary = "A legacy Python handler for mkdocstrings." -dependencies = [ - "mkdocstrings>=0.18", - "pytkdocs>=0.14", + "mkdocstrings>=0.19", ] [[package]] @@ -318,6 +325,11 @@ dependencies = [ "mkdocstrings>=0.18", ] +[[package]] +name = "msgpack" +version = "1.0.3" +summary = "MessagePack serializer" + [[package]] name = "packaging" version = "21.3" @@ -340,7 +352,7 @@ dependencies = [ [[package]] name = "pdm-pep517" -version = "0.12.3" +version = "1.0.0" requires_python = ">=3.7" summary = "A PEP 517 backend for PDM that supports PEP 621 metadata" @@ -356,7 +368,7 @@ dependencies = [ [[package]] name = "pip" -version = "22.0.4" +version = "22.1.1" requires_python = ">=3.7" summary = "The PyPA recommended tool for installing Python packages." @@ -408,15 +420,9 @@ version = "3.0.8" requires_python = ">=3.6.8" summary = "pyparsing module - Classes and methods to define and execute parsing grammars" -[[package]] -name = "pyrsistent" -version = "0.18.1" -requires_python = ">=3.7" -summary = "Persistent/Functional/Immutable data structures" - [[package]] name = "pytest" -version = "7.1.1" +version = "7.1.2" requires_python = ">=3.7" summary = "pytest: simple powerful testing with Python" dependencies = [ @@ -486,17 +492,6 @@ version = "0.20.0" requires_python = ">=3.5" summary = "Read key-value pairs from a .env file and set them as environment variables" -[[package]] -name = "pytkdocs" -version = "0.16.1" -requires_python = ">=3.7" -summary = "Load Python objects documentation." 
-dependencies = [ - "astunparse>=1.6; python_version < \"3.9\"", - "cached-property>=1.5; python_version < \"3.8\"", - "typing-extensions>=3.7; python_version < \"3.8\"", -] - [[package]] name = "pyyaml" version = "6.0" @@ -515,15 +510,23 @@ dependencies = [ [[package]] name = "requests" version = "2.27.1" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +requires_python = ">=3.7, <4" summary = "Python HTTP for Humans." dependencies = [ "certifi>=2017.4.17", - "charset-normalizer~=2.0.0; python_version >= \"3\"", - "idna<4,>=2.5; python_version >= \"3\"", + "charset-normalizer~=2.0.0", + "idna<4,>=2.5", "urllib3<1.27,>=1.21.1", ] +[[package]] +name = "requests-toolbelt" +version = "0.9.1" +summary = "A utility belt for advanced users of python-requests" +dependencies = [ + "requests<3.0.0,>=2.0.1", +] + [[package]] name = "resolvelib" version = "0.8.1" @@ -531,7 +534,7 @@ summary = "Resolve abstract dependencies into concrete ones" [[package]] name = "rich" -version = "12.2.0" +version = "12.3.0" requires_python = ">=3.6.3,<4.0.0" summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" dependencies = [ @@ -572,7 +575,7 @@ summary = "A lil' TOML parser" [[package]] name = "tomlkit" -version = "0.10.1" +version = "0.10.2" requires_python = ">=3.6,<4.0" summary = "Style preserving TOML library" @@ -589,49 +592,80 @@ dependencies = [ "tomli; python_version >= \"3.6\"", ] +[[package]] +name = "tox" +version = "3.25.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +summary = "tox is a generic virtualenv management and test command line tool" +dependencies = [ + "colorama>=0.4.1; platform_system == \"Windows\"", + "filelock>=3.0.0", + "importlib-metadata>=0.12; python_version < \"3.8\"", + "packaging>=14", + "pluggy>=0.12.0", + "py>=1.4.17", + "six>=1.14.0", + "toml>=0.9.4", + "virtualenv!=20.0.0,!=20.0.1,!=20.0.2,!=20.0.3,!=20.0.4,!=20.0.5,!=20.0.6,!=20.0.7,>=16.0.0", +] + +[[package]] +name = "tox-pdm" +version = "0.5.0" +requires_python = ">=3.7" +summary = "A plugin for tox that utilizes PDM as the package manager and installer" +dependencies = [ + "toml>=0.10", + "tox>=3.18.0", +] + [[package]] name = "typing-extensions" version = "4.2.0" requires_python = ">=3.7" summary = "Backported and Experimental Type Hints for Python 3.7+" +[[package]] +name = "unearth" +version = "0.4.0" +requires_python = ">=3.7" +summary = "A utility to fetch and download python packages" +dependencies = [ + "cached-property>=1.5.2; python_version < \"3.8\"", + "packaging>20", + "requests>2.25", +] + [[package]] name = "urllib3" version = "1.26.9" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" summary = "HTTP library with thread-safe connection pooling, file post, and more." -[[package]] -name = "vendoring" -version = "1.2.0" -requires_python = "~= 3.8" -summary = "A command line tool, to simplify vendoring pure Python dependencies." 
-dependencies = [ - "click", - "jsonschema", - "packaging", - "requests", - "rich", - "toml", -] - [[package]] name = "verspec" version = "0.1.0" summary = "Flexible version handling" +[[package]] +name = "virtualenv" +version = "20.15.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +summary = "Virtual Python Environment builder" +dependencies = [ + "distlib<1,>=0.3.1", + "filelock<4,>=3.2", + "importlib-metadata>=0.12; python_version < \"3.8\"", + "platformdirs<3,>=2", + "six<2,>=1.9.0", +] + [[package]] name = "watchdog" version = "2.1.7" requires_python = ">=3.6" summary = "Filesystem events monitoring" -[[package]] -name = "wheel" -version = "0.37.1" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -summary = "A built-package format for Python" - [[package]] name = "zipp" version = "3.8.0" @@ -640,17 +674,13 @@ summary = "Backport of pathlib-compatible object wrapper for zip files" [metadata] lock_version = "3.1" -content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d1807b2c56" +content_hash = "sha256:ad6ea4424077f60e0abd3e1c7339e9b29ed8b6b2471b7e38755cd92bb936a3e5" [metadata.files] "arpeggio 1.10.2" = [ {file = "Arpeggio-1.10.2-py2.py3-none-any.whl", hash = "sha256:fed68a1cb7f529cbd4d725597cc811b7506885fcdef17d4cdcf564341a1e210b"}, {file = "Arpeggio-1.10.2.tar.gz", hash = "sha256:bfe349f252f82f82d84cb886f1d5081d1a31451e6045275e9f90b65d0daa06f1"}, ] -"astunparse 1.6.3" = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] "atomicwrites 1.4.0" = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -662,21 +692,25 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 "blinker 1.4" = [ {file = "blinker-1.4.tar.gz", hash = "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"}, ] +"cachecontrol 0.12.11" = [ + {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"}, + {file = "CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"}, +] "cached-property 1.5.2" = [ {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, ] -"certifi 2021.10.8" = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +"certifi 2022.5.18.1" = [ + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, ] "charset-normalizer 2.0.12" = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, 
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, ] -"click 8.1.2" = [ - {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, - {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, +"click 8.1.3" = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] "click-default-group 1.2.2" = [ {file = "click-default-group-1.2.2.tar.gz", hash = "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904"}, @@ -732,21 +766,29 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, ] +"distlib 0.3.4" = [ + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, +] "execnet 1.9.0" = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] +"filelock 3.7.1" = [ + {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, + {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, +] "findpython 0.1.6" = [ {file = "findpython-0.1.6-py3-none-any.whl", hash = "sha256:79ec09965019b73c83f49df0de9a056d2055abf694f3813966a4841e80cf734e"}, {file = "findpython-0.1.6.tar.gz", hash = "sha256:9fd6185cdcb96baa7109308447efb493b2c7f1a8f569e128af14d726b2a69e18"}, ] -"ghp-import 2.0.2" = [ - {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, - {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, +"ghp-import 2.1.0" = [ + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, ] -"griffe 0.17.0" = [ - {file = "griffe-0.17.0-py3-none-any.whl", hash = "sha256:5f3e5fff41f04b754dd77b6cea1040b2d3ecb6cb900c592f4f3b41879aaaecfd"}, - {file = "griffe-0.17.0.tar.gz", hash = "sha256:a445b3f64662d29584f1691e5d5573ed64d6ee7a1cac86be7d6807b1ac0c51e5"}, +"griffe 0.18.0" = [ + {file = "griffe-0.18.0-py3-none-any.whl", hash = "sha256:578e52513851f4652eb57543364df1675a755536b0ba8548cbb96dd07962b115"}, + {file = "griffe-0.18.0.tar.gz", hash = "sha256:b6231f80a002322ac3fcfc2e1cb02a016666b7caba1da9468fe52a5ef8710dec"}, ] "idna 3.3" = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -756,10 +798,6 @@ content_hash = 
"sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, ] -"importlib-resources 5.7.1" = [ - {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, - {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, -] "incremental 21.3.0" = [ {file = "incremental-21.3.0-py2.py3-none-any.whl", hash = "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321"}, {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, @@ -772,17 +810,21 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "installer-0.5.1-py3-none-any.whl", hash = "sha256:1d6c8d916ed82771945b9c813699e6f57424ded970c9d8bf16bbc23e1e826ed3"}, {file = "installer-0.5.1.tar.gz", hash = "sha256:f970995ec2bb815e2fdaf7977b26b2091e1e386f0f42eafd5ac811953dc5d445"}, ] -"jinja2 3.1.1" = [ - {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, - {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, +"jinja2 3.1.2" = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +"lockfile 0.12.2" = [ + {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, + {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, ] -"jsonschema 4.4.0" = [ - {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, - {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, +"markdown 3.3.7" = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] -"markdown 3.3.6" = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, +"markdown-exec 0.7.0" = [ + {file = "markdown_exec-0.7.0-py3-none-any.whl", hash = "sha256:c40feadeddf0070656adf3bb9a1b65a4eb337361b9bca3a47542fbda22306ba8"}, + {file = "markdown-exec-0.7.0.tar.gz", hash = "sha256:a4f6939ac36dccac560d3889a518b654748cc57b8de8491c374f9e19c93dfba8"}, ] "markdown-include 0.6.0" = [ {file = "markdown-include-0.6.0.tar.gz", hash = "sha256:6f5d680e36f7780c7f0f61dca53ca581bd50d1b56137ddcd6353efafa0c3e4a2"}, @@ -845,25 +887,61 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "mkdocs_autorefs-0.4.1-py3-none-any.whl", hash = "sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b"}, 
{file = "mkdocs-autorefs-0.4.1.tar.gz", hash = "sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84"}, ] -"mkdocs-material 8.2.9" = [ - {file = "mkdocs_material-8.2.9-py2.py3-none-any.whl", hash = "sha256:fbe39baa57c70fdbe9d1a24c6c2d0625e255e74f22b20aff43abb64157446f4d"}, - {file = "mkdocs-material-8.2.9.tar.gz", hash = "sha256:c177ff180b024bc061714c9483a8d26d36e1b9fdef4be8e70e243770416fe9d7"}, +"mkdocs-material 8.2.13" = [ + {file = "mkdocs_material-8.2.13-py2.py3-none-any.whl", hash = "sha256:2666f1d7d6a8dc28dda1e777f77add12799e66bd00250de99914a33525763816"}, + {file = "mkdocs-material-8.2.13.tar.gz", hash = "sha256:505408fe001d668543236f5db5a88771460ad83ef7b58826630cc1f8b7e63099"}, ] "mkdocs-material-extensions 1.0.3" = [ {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, ] -"mkdocstrings 0.18.1" = [ - {file = "mkdocstrings-0.18.1-py3-none-any.whl", hash = "sha256:4053929356df8cd69ed32eef71d8f676a472ef72980c9ffd4f933ead1debcdad"}, - {file = "mkdocstrings-0.18.1.tar.gz", hash = "sha256:fb7c91ce7e3ab70488d3fa6c073a4f827cdc319042f682ef8ea95459790d64fc"}, +"mkdocstrings 0.19.0" = [ + {file = "mkdocstrings-0.19.0-py3-none-any.whl", hash = "sha256:3217d510d385c961f69385a670b2677e68e07b5fea4a504d86bf54c006c87c7d"}, + {file = "mkdocstrings-0.19.0.tar.gz", hash = "sha256:efa34a67bad11229d532d89f6836a8a215937548623b64f3698a1df62e01cc3e"}, ] "mkdocstrings-python 0.6.6" = [ {file = "mkdocstrings_python-0.6.6-py3-none-any.whl", hash = "sha256:c118438d3cb4b14c492a51d109f4e5b27ab06ba19b099d624430dfd904926152"}, {file = "mkdocstrings-python-0.6.6.tar.gz", hash = "sha256:37281696b9f199624ae420e0625b6659b7fdfbea736618bce7fd978682dea3b1"}, ] -"mkdocstrings-python-legacy 0.2.2" = [ - {file = "mkdocstrings_python_legacy-0.2.2-py3-none-any.whl", hash = "sha256:379107a3a5b8db9b462efc4493c122efe21e825e3702425dbd404621302a563a"}, - {file = "mkdocstrings-python-legacy-0.2.2.tar.gz", hash = "sha256:f0e7ec6a19750581b752acb38f6b32fcd1efe006f14f6703125d2c2c9a5c6f02"}, +"mkdocstrings 0.18.1" = [ + {file = "mkdocstrings-0.18.1-py3-none-any.whl", hash = "sha256:4053929356df8cd69ed32eef71d8f676a472ef72980c9ffd4f933ead1debcdad"}, + {file = "mkdocstrings-0.18.1.tar.gz", hash = "sha256:fb7c91ce7e3ab70488d3fa6c073a4f827cdc319042f682ef8ea95459790d64fc"}, +] +"msgpack 1.0.3" = [ + {file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079"}, + {file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3"}, + {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73"}, + {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147"}, + {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39"}, + {file = "msgpack-1.0.3-cp310-cp310-win32.whl", hash = "sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85"}, + {file = "msgpack-1.0.3-cp310-cp310-win_amd64.whl", hash 
= "sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7"}, + {file = "msgpack-1.0.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d"}, + {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b"}, + {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec"}, + {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770"}, + {file = "msgpack-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9"}, + {file = "msgpack-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a"}, + {file = "msgpack-1.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a"}, + {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc"}, + {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a"}, + {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920"}, + {file = "msgpack-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50"}, + {file = "msgpack-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba"}, + {file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea"}, + {file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a"}, + {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef"}, + {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611"}, + {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1"}, + {file = "msgpack-1.0.3-cp38-cp38-win32.whl", hash = "sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4"}, + {file = "msgpack-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a"}, + {file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3"}, + {file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52"}, + {file = "msgpack-1.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2"}, + {file = 
"msgpack-1.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996"}, + {file = "msgpack-1.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2"}, + {file = "msgpack-1.0.3-cp39-cp39-win32.whl", hash = "sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88"}, + {file = "msgpack-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d"}, + {file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"}, ] "packaging 21.3" = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, @@ -873,17 +951,17 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "parver-0.3.1-py2.py3-none-any.whl", hash = "sha256:41a548c51b006a2f2522b54293cbfd2514bffa10774ece8430c9964a20cbd8b4"}, {file = "parver-0.3.1.tar.gz", hash = "sha256:c902e0653bcce927cc156a7fd9b3a51924cbce3bf3d0bfd49fc282bfd0c5dfd3"}, ] -"pdm-pep517 0.12.3" = [ - {file = "pdm_pep517-0.12.3-py3-none-any.whl", hash = "sha256:c03760e32eeca43da01ffa6c6911cd8c9acb5f232dff1454dd470f4533b80b60"}, - {file = "pdm-pep517-0.12.3.tar.gz", hash = "sha256:117bfb1619b86beb34fe009b8d41acacf790d1fc5240ccc11f6cada151d46887"}, +"pdm-pep517 1.0.0" = [ + {file = "pdm_pep517-1.0.0-py3-none-any.whl", hash = "sha256:66cd7a037d10004a311abbfd196560ccaa02e98856ca0c40b5c455423509aad9"}, + {file = "pdm-pep517-1.0.0.tar.gz", hash = "sha256:eb68ef1c790f30b6ed4dfc3bbe14e160379fd4e10fbedf4e799c016c07776b7b"}, ] "pep517 0.12.0" = [ {file = "pep517-0.12.0-py2.py3-none-any.whl", hash = "sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161"}, {file = "pep517-0.12.0.tar.gz", hash = "sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0"}, ] -"pip 22.0.4" = [ - {file = "pip-22.0.4-py3-none-any.whl", hash = "sha256:c6aca0f2f081363f689f041d90dab2a07a9a07fb840284db2218117a52da800b"}, - {file = "pip-22.0.4.tar.gz", hash = "sha256:b3a9de2c6ef801e9247d1527a4b16f92f2cc141cd1489f3fffaf6a9e96729764"}, +"pip 22.1.1" = [ + {file = "pip-22.1.1-py3-none-any.whl", hash = "sha256:e7bcf0b2cbdec2af84cc1b7b79b25fdbd7228fbdb61a4dca0b82810d0ba9d18b"}, + {file = "pip-22.1.1.tar.gz", hash = "sha256:8dfb15d8a1c3d3085a4cbe11f29e19527dfaf2ba99354326fd62cec013eaee81"}, ] "platformdirs 2.5.2" = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -913,32 +991,9 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, ] -"pyrsistent 0.18.1" = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] -"pytest 7.1.1" = [ - {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, - {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, +"pytest 7.1.2" = [ + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] "pytest-cov 3.0.0" = [ {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, @@ -964,10 
+1019,6 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"}, {file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"}, ] -"pytkdocs 0.16.1" = [ - {file = "pytkdocs-0.16.1-py3-none-any.whl", hash = "sha256:a8c3f46ecef0b92864cc598e9101e9c4cf832ebbf228f50c84aa5dd850aac379"}, - {file = "pytkdocs-0.16.1.tar.gz", hash = "sha256:e2ccf6dfe9dbbceb09818673f040f1a7c32ed0bffb2d709b06be6453c4026045"}, -] "pyyaml 6.0" = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -1011,13 +1062,17 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] +"requests-toolbelt 0.9.1" = [ + {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, + {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, +] "resolvelib 0.8.1" = [ {file = "resolvelib-0.8.1-py2.py3-none-any.whl", hash = "sha256:d9b7907f055c3b3a2cfc56c914ffd940122915826ff5fb5b1de0c99778f4de98"}, {file = "resolvelib-0.8.1.tar.gz", hash = "sha256:c6ea56732e9fb6fca1b2acc2ccc68a0b6b8c566d8f3e78e0443310ede61dbd37"}, ] -"rich 12.2.0" = [ - {file = "rich-12.2.0-py3-none-any.whl", hash = "sha256:c50f3d253bc6a9bb9c79d61a26d510d74abdf1b16881260fab5edfc3edfb082f"}, - {file = "rich-12.2.0.tar.gz", hash = "sha256:ea74bc9dad9589d8eea3e3fd0b136d8bf6e428888955f215824c2894f0da8b47"}, +"rich 12.3.0" = [ + {file = "rich-12.3.0-py3-none-any.whl", hash = "sha256:0eb63013630c6ee1237e0e395d51cb23513de6b5531235e33889e8842bdf3a6f"}, + {file = "rich-12.3.0.tar.gz", hash = "sha256:7e8700cda776337036a712ff0495b04052fb5f957c7dfb8df997f88350044b64"}, ] "setuptools 62.3.3" = [ {file = "setuptools-62.3.3-py3-none-any.whl", hash = "sha256:d1746e7fd520e83bbe210d02fff1aa1a425ad671c7a9da7d246ec2401a087198"}, @@ -1039,30 +1094,42 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -"tomlkit 0.10.1" = [ - {file = "tomlkit-0.10.1-py3-none-any.whl", hash = "sha256:3eba517439dcb2f84cf39f4f85fd2c3398309823a3c75ac3e73003638daf7915"}, - {file = "tomlkit-0.10.1.tar.gz", hash = "sha256:3c517894eadef53e9072d343d37e4427b8f0b6200a70b7c9a19b2ebd1f53b951"}, +"tomlkit 0.10.2" = [ + {file = "tomlkit-0.10.2-py3-none-any.whl", hash = "sha256:905cf92c2111ef80d355708f47ac24ad1b6fc2adc5107455940088c9bbecaedb"}, + {file = "tomlkit-0.10.2.tar.gz", hash = "sha256:30d54c0b914e595f3d10a87888599eab5321a2a69abc773bbefff51599b72db6"}, ] "towncrier 21.9.0" = [ {file = "towncrier-21.9.0-py2.py3-none-any.whl", hash = 
"sha256:fc5a88a2a54988e3a8ed2b60d553599da8330f65722cc607c839614ed87e0f92"}, {file = "towncrier-21.9.0.tar.gz", hash = "sha256:9cb6f45c16e1a1eec9d0e7651165e7be60cd0ab81d13a5c96ca97a498ae87f48"}, ] +"tox 3.25.0" = [ + {file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"}, + {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"}, +] +"tox-pdm 0.5.0" = [ + {file = "tox_pdm-0.5.0-py3-none-any.whl", hash = "sha256:0075ff9ed47ee13dc6e55122e6da121661a5553a633b8dd278013fbee81e4bb4"}, + {file = "tox-pdm-0.5.0.tar.gz", hash = "sha256:45531c80c9670e7d9a0109ec5ab0d9add0492db4b41aa4ae0f20b295e4fd60e3"}, +] "typing-extensions 4.2.0" = [ {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] +"unearth 0.4.0" = [ + {file = "unearth-0.4.0-py3-none-any.whl", hash = "sha256:1b557fbc677814534e6386984f41ad0b60fe52f3ad26d6480871542197edf298"}, + {file = "unearth-0.4.0.tar.gz", hash = "sha256:d1e071f2e119d00514ebdcd264276f9fe2010a81c62314bfa8766a26faaa6ea6"}, +] "urllib3 1.26.9" = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] -"vendoring 1.2.0" = [ - {file = "vendoring-1.2.0-py2.py3-none-any.whl", hash = "sha256:35b5fca683264e69e851a7580bb6a6f9848af024ffc8382ed5491bcfa55750c6"}, - {file = "vendoring-1.2.0.tar.gz", hash = "sha256:6340a84bf542222c96f22ebc3cb87e4d86932dc04bc8d446e38285594702c00e"}, -] "verspec 0.1.0" = [ {file = "verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31"}, {file = "verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e"}, ] +"virtualenv 20.15.0" = [ + {file = "virtualenv-20.15.0-py2.py3-none-any.whl", hash = "sha256:804cce4de5b8a322f099897e308eecc8f6e2951f1a8e7e2b3598dff865f01336"}, + {file = "virtualenv-20.15.0.tar.gz", hash = "sha256:4c44b1d77ca81f8368e2d7414f9b20c428ad16b343ac6d226206c5b84e2b4fcc"}, +] "watchdog 2.1.7" = [ {file = "watchdog-2.1.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:177bae28ca723bc00846466016d34f8c1d6a621383b6caca86745918d55c7383"}, {file = "watchdog-2.1.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d1cf7dfd747dec519486a98ef16097e6c480934ef115b16f18adb341df747a4"}, @@ -1089,10 +1156,6 @@ content_hash = "sha256:214ff0e84d8c04b7a633957f7df2c714d4764fc16979212186f1d8d18 {file = "watchdog-2.1.7-py3-none-win_ia64.whl", hash = "sha256:351e09b6d9374d5bcb947e6ac47a608ec25b9d70583e9db00b2fcdb97b00b572"}, {file = "watchdog-2.1.7.tar.gz", hash = "sha256:3fd47815353be9c44eebc94cc28fe26b2b0c5bd889dafc4a5a7cbdf924143480"}, ] -"wheel 0.37.1" = [ - {file = "wheel-0.37.1-py2.py3-none-any.whl", hash = "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a"}, - {file = "wheel-0.37.1.tar.gz", hash = "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"}, -] "zipp 3.8.0" = [ {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, {file = "zipp-3.8.0.tar.gz", hash = 
"sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, diff --git a/pdm/__init__.py b/pdm/__init__.py index 394bfd1ed0..40eaeb5228 100644 --- a/pdm/__init__.py +++ b/pdm/__init__.py @@ -16,14 +16,3 @@ "Synchronizer", "Core", ) - - -def _fix_pkg_resources() -> None: - import importlib - import sys - - sys.modules["pkg_resources"] = importlib.import_module("pip._vendor.pkg_resources") - - -_fix_pkg_resources() -del _fix_pkg_resources diff --git a/pdm/_types.py b/pdm/_types.py index ce3dad3487..95b99500e8 100644 --- a/pdm/_types.py +++ b/pdm/_types.py @@ -1,22 +1,18 @@ -import sys -from typing import Any, Dict, List, NamedTuple, Tuple, Union +from __future__ import annotations -if sys.version_info >= (3, 8): - from importlib.metadata import Distribution - from typing import Literal, Protocol, TypedDict -else: - from importlib_metadata import Distribution - from typing_extensions import Literal, Protocol, TypedDict +from typing import Any, Dict, List, NamedTuple, Tuple, TypeVar, Union + +from pdm.compat import Literal, Protocol, TypedDict class Source(TypedDict, total=False): url: str verify_ssl: bool name: str - type: Union[Literal["index"], Literal["find_links"]] + type: Literal["index", "find_links"] -RequirementDict = Union[str, Dict[str, Union[bool, str]]] +RequirementDict = Union[str, Dict[str, Union[str, bool]]] CandidateInfo = Tuple[List[str], str, str] @@ -31,15 +27,18 @@ def __lt__(self, __other: Any) -> bool: ... -SearchResult = List[Package] +SpinnerT = TypeVar("SpinnerT", bound="Spinner") + -__all__ = ( - "Literal", - "Source", - "RequirementDict", - "CandidateInfo", - "Distribution", - "Package", - "SearchResult", - "Protocol", -) +class Spinner(Protocol): + def update(self, text: str) -> None: + ... + + def __enter__(self: SpinnerT) -> SpinnerT: + ... + + def __exit__(self, *args: Any) -> None: + ... + + +SearchResult = List[Package] diff --git a/pdm/_vendor/colorama/LICENSE.txt b/pdm/_vendor/colorama/LICENSE.txt deleted file mode 100644 index 3105888ec1..0000000000 --- a/pdm/_vendor/colorama/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2010 Jonathan Hartley -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holders, nor those of its contributors - may be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pdm/_vendor/colorama/__init__.py b/pdm/_vendor/colorama/__init__.py deleted file mode 100644 index b149ed79b0..0000000000 --- a/pdm/_vendor/colorama/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from .initialise import init, deinit, reinit, colorama_text -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.4' diff --git a/pdm/_vendor/colorama/ansi.py b/pdm/_vendor/colorama/ansi.py deleted file mode 100644 index 11ec695ff7..0000000000 --- a/pdm/_vendor/colorama/ansi.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. -See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' -OSC = '\033]' -BEL = '\a' - - -def code_to_chars(code): - return CSI + str(code) + 'm' - -def set_title(title): - return OSC + '2;' + title + BEL - -def clear_screen(mode=2): - return CSI + str(mode) + 'J' - -def clear_line(mode=2): - return CSI + str(mode) + 'K' - - -class AnsiCodes(object): - def __init__(self): - # the subclasses declare class attributes which are numbers. - # Upon instantiation we define instance attributes, which are the same - # as the class attributes but wrapped with the ANSI escape sequence - for name in dir(self): - if not name.startswith('_'): - value = getattr(self, name) - setattr(self, name, code_to_chars(value)) - - -class AnsiCursor(object): - def UP(self, n=1): - return CSI + str(n) + 'A' - def DOWN(self, n=1): - return CSI + str(n) + 'B' - def FORWARD(self, n=1): - return CSI + str(n) + 'C' - def BACK(self, n=1): - return CSI + str(n) + 'D' - def POS(self, x=1, y=1): - return CSI + str(y) + ';' + str(x) + 'H' - - -class AnsiFore(AnsiCodes): - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 90 - LIGHTRED_EX = 91 - LIGHTGREEN_EX = 92 - LIGHTYELLOW_EX = 93 - LIGHTBLUE_EX = 94 - LIGHTMAGENTA_EX = 95 - LIGHTCYAN_EX = 96 - LIGHTWHITE_EX = 97 - - -class AnsiBack(AnsiCodes): - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 100 - LIGHTRED_EX = 101 - LIGHTGREEN_EX = 102 - LIGHTYELLOW_EX = 103 - LIGHTBLUE_EX = 104 - LIGHTMAGENTA_EX = 105 - LIGHTCYAN_EX = 106 - LIGHTWHITE_EX = 107 - - -class AnsiStyle(AnsiCodes): - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiFore() -Back = AnsiBack() -Style = AnsiStyle() -Cursor = AnsiCursor() diff --git a/pdm/_vendor/colorama/ansitowin32.py b/pdm/_vendor/colorama/ansitowin32.py deleted file mode 100644 index 6039a05432..0000000000 --- a/pdm/_vendor/colorama/ansitowin32.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright Jonathan Hartley 2013. 
BSD 3-Clause license, see LICENSE file. -import re -import sys -import os - -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import WinTerm, WinColor, WinStyle -from .win32 import windll, winapi_test - - -winterm = None -if windll is not None: - winterm = WinTerm() - - -class StreamWrapper(object): - ''' - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()', which is delegated to our - Converter instance. - ''' - def __init__(self, wrapped, converter): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. - self.__wrapped = wrapped - self.__convertor = converter - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def __enter__(self, *args, **kwargs): - # special method lookup bypasses __getattr__/__getattribute__, see - # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit - # thus, contextlib magic methods are not proxied via __getattr__ - return self.__wrapped.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.__wrapped.__exit__(*args, **kwargs) - - def write(self, text): - self.__convertor.write(text) - - def isatty(self): - stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True - try: - stream_isatty = stream.isatty - except AttributeError: - return False - else: - return stream_isatty() - - @property - def closed(self): - stream = self.__wrapped - try: - return stream.closed - except AttributeError: - return True - - -class AnsiToWin32(object): - ''' - Implements a 'write()' method which, on Windows, will strip ANSI character - sequences from the text, and if outputting to a tty, will convert them into - win32 function calls. - ''' - ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command - - def __init__(self, wrapped, convert=None, strip=None, autoreset=False): - # The wrapped stream (normally sys.stdout or sys.stderr) - self.wrapped = wrapped - - # should we reset colors to defaults after every .write() - self.autoreset = autoreset - - # create the proxy wrapping our output stream - self.stream = StreamWrapper(wrapped, self) - - on_windows = os.name == 'nt' - # We test if the WinAPI works, because even if we are on Windows - # we may be using a terminal that doesn't support the WinAPI - # (e.g. Cygwin Terminal). In this case it's up to the terminal - # to support the ANSI codes. - conversion_supported = on_windows and winapi_test() - - # should we strip ANSI sequences from our output? - if strip is None: - strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) - self.strip = strip - - # should we should convert ANSI sequences into win32 calls? - if convert is None: - convert = conversion_supported and not self.stream.closed and self.stream.isatty() - self.convert = convert - - # dict of ansi codes to win32 functions and parameters - self.win32_calls = self.get_win32_calls() - - # are we wrapping stderr? - self.on_stderr = self.wrapped is sys.stderr - - def should_wrap(self): - ''' - True if this class is actually needed. If false, then the output - stream will not be affected, nor will win32 calls be issued, so - wrapping stdout is not actually required. 
This will generally be - False on non-Windows platforms, unless optional functionality like - autoreset has been requested using kwargs to init() - ''' - return self.convert or self.strip or self.autoreset - - def get_win32_calls(self): - if self.convert and winterm: - return { - AnsiStyle.RESET_ALL: (winterm.reset_all, ), - AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), - AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), - AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), - AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), - AnsiFore.RED: (winterm.fore, WinColor.RED), - AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), - AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), - AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), - AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), - AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), - AnsiFore.WHITE: (winterm.fore, WinColor.GREY), - AnsiFore.RESET: (winterm.fore, ), - AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), - AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), - AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), - AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), - AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), - AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), - AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), - AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), - AnsiBack.BLACK: (winterm.back, WinColor.BLACK), - AnsiBack.RED: (winterm.back, WinColor.RED), - AnsiBack.GREEN: (winterm.back, WinColor.GREEN), - AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), - AnsiBack.BLUE: (winterm.back, WinColor.BLUE), - AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), - AnsiBack.CYAN: (winterm.back, WinColor.CYAN), - AnsiBack.WHITE: (winterm.back, WinColor.GREY), - AnsiBack.RESET: (winterm.back, ), - AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), - AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), - AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), - AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), - AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), - AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), - AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), - AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), - } - return dict() - - def write(self, text): - if self.strip or self.convert: - self.write_and_convert(text) - else: - self.wrapped.write(text) - self.wrapped.flush() - if self.autoreset: - self.reset_all() - - - def reset_all(self): - if self.convert: - self.call_win32('m', (0,)) - elif not self.strip and not self.stream.closed: - self.wrapped.write(Style.RESET_ALL) - - - def write_and_convert(self, text): - ''' - Write the given text to our wrapped stream, stripping any ANSI - sequences from the text, and optionally converting them into win32 - calls. 
- ''' - cursor = 0 - text = self.convert_osc(text) - for match in self.ANSI_CSI_RE.finditer(text): - start, end = match.span() - self.write_plain_text(text, cursor, start) - self.convert_ansi(*match.groups()) - cursor = end - self.write_plain_text(text, cursor, len(text)) - - - def write_plain_text(self, text, start, end): - if start < end: - self.wrapped.write(text[start:end]) - self.wrapped.flush() - - - def convert_ansi(self, paramstring, command): - if self.convert: - params = self.extract_params(command, paramstring) - self.call_win32(command, params) - - - def extract_params(self, command, paramstring): - if command in 'Hf': - params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) - while len(params) < 2: - # defaults: - params = params + (1,) - else: - params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: - # defaults: - if command in 'JKm': - params = (0,) - elif command in 'ABCD': - params = (1,) - - return params - - - def call_win32(self, command, params): - if command == 'm': - for param in params: - if param in self.win32_calls: - func_args = self.win32_calls[param] - func = func_args[0] - args = func_args[1:] - kwargs = dict(on_stderr=self.on_stderr) - func(*args, **kwargs) - elif command in 'J': - winterm.erase_screen(params[0], on_stderr=self.on_stderr) - elif command in 'K': - winterm.erase_line(params[0], on_stderr=self.on_stderr) - elif command in 'Hf': # cursor position - absolute - winterm.set_cursor_position(params, on_stderr=self.on_stderr) - elif command in 'ABCD': # cursor position - relative - n = params[0] - # A - up, B - down, C - forward, D - back - x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] - winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) - - - def convert_osc(self, text): - for match in self.ANSI_OSC_RE.finditer(text): - start, end = match.span() - text = text[:start] + text[end:] - paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) - return text diff --git a/pdm/_vendor/colorama/initialise.py b/pdm/_vendor/colorama/initialise.py deleted file mode 100644 index 430d066872..0000000000 --- a/pdm/_vendor/colorama/initialise.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
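The `initialise` module removed below was the vendored colorama's public entry point: `init()` swapped `sys.stdout`/`sys.stderr` for `AnsiToWin32` wrappers and registered an `atexit` hook to reset colors. A minimal consumer-side sketch, assuming the upstream `colorama` package from PyPI as a stand-in (the diff itself does not name a replacement):

```python
import colorama
from colorama import Fore, Style

# init() wraps stdout/stderr the same way the deleted wrap_stream() helper does;
# autoreset=True appends Style.RESET_ALL after every write.
colorama.init(autoreset=True)
print(Fore.GREEN + "dependencies resolved")   # green on every platform
print(Style.BRIGHT + "building wheel")        # bold/bright text
colorama.deinit()                             # restore the original streams
```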
-import atexit -import contextlib -import sys - -from .ansitowin32 import AnsiToWin32 - - -orig_stdout = None -orig_stderr = None - -wrapped_stdout = None -wrapped_stderr = None - -atexit_done = False - - -def reset_all(): - if AnsiToWin32 is not None: # Issue #74: objects might become None at exit - AnsiToWin32(orig_stdout).reset_all() - - -def init(autoreset=False, convert=None, strip=None, wrap=True): - - if not wrap and any([autoreset, convert, strip]): - raise ValueError('wrap=False conflicts with any other arg=True') - - global wrapped_stdout, wrapped_stderr - global orig_stdout, orig_stderr - - orig_stdout = sys.stdout - orig_stderr = sys.stderr - - if sys.stdout is None: - wrapped_stdout = None - else: - sys.stdout = wrapped_stdout = \ - wrap_stream(orig_stdout, convert, strip, autoreset, wrap) - if sys.stderr is None: - wrapped_stderr = None - else: - sys.stderr = wrapped_stderr = \ - wrap_stream(orig_stderr, convert, strip, autoreset, wrap) - - global atexit_done - if not atexit_done: - atexit.register(reset_all) - atexit_done = True - - -def deinit(): - if orig_stdout is not None: - sys.stdout = orig_stdout - if orig_stderr is not None: - sys.stderr = orig_stderr - - -@contextlib.contextmanager -def colorama_text(*args, **kwargs): - init(*args, **kwargs) - try: - yield - finally: - deinit() - - -def reinit(): - if wrapped_stdout is not None: - sys.stdout = wrapped_stdout - if wrapped_stderr is not None: - sys.stderr = wrapped_stderr - - -def wrap_stream(stream, convert, strip, autoreset, wrap): - if wrap: - wrapper = AnsiToWin32(stream, - convert=convert, strip=strip, autoreset=autoreset) - if wrapper.should_wrap(): - stream = wrapper.stream - return stream diff --git a/pdm/_vendor/colorama/win32.py b/pdm/_vendor/colorama/win32.py deleted file mode 100644 index c2d8360336..0000000000 --- a/pdm/_vendor/colorama/win32.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
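`win32.py`, removed below, binds the legacy Windows console API through `ctypes`, declaring `argtypes`/`restype` on each `kernel32` function so that bad handles fail loudly instead of corrupting the call stack. A self-contained sketch of the same binding pattern; the `0x0C` attribute value (bright red on black) is an illustrative choice, not something the diff prescribes:

```python
import os

if os.name == "nt":
    import ctypes
    from ctypes import wintypes

    kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
    kernel32.GetStdHandle.argtypes = [wintypes.DWORD]
    kernel32.GetStdHandle.restype = wintypes.HANDLE
    kernel32.SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
    kernel32.SetConsoleTextAttribute.restype = wintypes.BOOL

    STD_OUTPUT_HANDLE = -11  # the constant the vendored module names STDOUT
    handle = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
    kernel32.SetConsoleTextAttribute(handle, 0x0C)  # bright red text
    print("colored via the Win32 console API")
    kernel32.SetConsoleTextAttribute(handle, 0x07)  # restore default grey
else:
    # Elsewhere the ANSI escape path (CSI sequences) is used directly.
    print("\033[91mcolored via ANSI escapes\033[0m")
```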
- -# from winbase.h -STDOUT = -11 -STDERR = -12 - -try: - import ctypes - from ctypes import LibraryLoader - windll = LibraryLoader(ctypes.WinDLL) - from ctypes import wintypes -except (AttributeError, ImportError): - windll = None - SetConsoleTextAttribute = lambda *_: None - winapi_test = lambda *_: None -else: - from ctypes import byref, Structure, c_char, POINTER - - COORD = wintypes._COORD - - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - """struct in wincon.h.""" - _fields_ = [ - ("dwSize", COORD), - ("dwCursorPosition", COORD), - ("wAttributes", wintypes.WORD), - ("srWindow", wintypes.SMALL_RECT), - ("dwMaximumWindowSize", COORD), - ] - def __str__(self): - return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( - self.dwSize.Y, self.dwSize.X - , self.dwCursorPosition.Y, self.dwCursorPosition.X - , self.wAttributes - , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right - , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X - ) - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - - _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo - _GetConsoleScreenBufferInfo.argtypes = [ - wintypes.HANDLE, - POINTER(CONSOLE_SCREEN_BUFFER_INFO), - ] - _GetConsoleScreenBufferInfo.restype = wintypes.BOOL - - _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute - _SetConsoleTextAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - ] - _SetConsoleTextAttribute.restype = wintypes.BOOL - - _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition - _SetConsoleCursorPosition.argtypes = [ - wintypes.HANDLE, - COORD, - ] - _SetConsoleCursorPosition.restype = wintypes.BOOL - - _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA - _FillConsoleOutputCharacterA.argtypes = [ - wintypes.HANDLE, - c_char, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputCharacterA.restype = wintypes.BOOL - - _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute - _FillConsoleOutputAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputAttribute.restype = wintypes.BOOL - - _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW - _SetConsoleTitleW.argtypes = [ - wintypes.LPCWSTR - ] - _SetConsoleTitleW.restype = wintypes.BOOL - - def _winapi_test(handle): - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return bool(success) - - def winapi_test(): - return any(_winapi_test(h) for h in - (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) - - def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = _GetStdHandle(stream_id) - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return csbi - - def SetConsoleTextAttribute(stream_id, attrs): - handle = _GetStdHandle(stream_id) - return _SetConsoleTextAttribute(handle, attrs) - - def SetConsoleCursorPosition(stream_id, position, adjust=True): - position = COORD(*position) - # If the position is out of range, do nothing. - if position.Y <= 0 or position.X <= 0: - return - # Adjust for Windows' SetConsoleCursorPosition: - # 1. being 0-based, while ANSI is 1-based. - # 2. expecting (x,y), while ANSI uses (y,x). 
- adjusted_position = COORD(position.Y - 1, position.X - 1) - if adjust: - # Adjust for viewport's scroll position - sr = GetConsoleScreenBufferInfo(STDOUT).srWindow - adjusted_position.Y += sr.Top - adjusted_position.X += sr.Left - # Resume normal processing - handle = _GetStdHandle(stream_id) - return _SetConsoleCursorPosition(handle, adjusted_position) - - def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = _GetStdHandle(stream_id) - char = c_char(char.encode()) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - success = _FillConsoleOutputCharacterA( - handle, char, length, start, byref(num_written)) - return num_written.value - - def FillConsoleOutputAttribute(stream_id, attr, length, start): - ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = _GetStdHandle(stream_id) - attribute = wintypes.WORD(attr) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - return _FillConsoleOutputAttribute( - handle, attribute, length, start, byref(num_written)) - - def SetConsoleTitle(title): - return _SetConsoleTitleW(title) diff --git a/pdm/_vendor/colorama/winterm.py b/pdm/_vendor/colorama/winterm.py deleted file mode 100644 index 0fdb4ec4e9..0000000000 --- a/pdm/_vendor/colorama/winterm.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from . import win32 - - -# from wincon.h -class WinColor(object): - BLACK = 0 - BLUE = 1 - GREEN = 2 - CYAN = 3 - RED = 4 - MAGENTA = 5 - YELLOW = 6 - GREY = 7 - -# from wincon.h -class WinStyle(object): - NORMAL = 0x00 # dim text, dim background - BRIGHT = 0x08 # bright text, dim background - BRIGHT_BACKGROUND = 0x80 # dim text, bright background - -class WinTerm(object): - - def __init__(self): - self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes - self.set_attrs(self._default) - self._default_fore = self._fore - self._default_back = self._back - self._default_style = self._style - # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. - # So that LIGHT_EX colors and BRIGHT style do not clobber each other, - # we track them separately, since LIGHT_EX is overwritten by Fore/Back - # and BRIGHT is overwritten by Style codes. 
- self._light = 0 - - def get_attrs(self): - return self._fore + self._back * 16 + (self._style | self._light) - - def set_attrs(self, value): - self._fore = value & 7 - self._back = (value >> 4) & 7 - self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) - - def reset_all(self, on_stderr=None): - self.set_attrs(self._default) - self.set_console(attrs=self._default) - self._light = 0 - - def fore(self, fore=None, light=False, on_stderr=False): - if fore is None: - fore = self._default_fore - self._fore = fore - # Emulate LIGHT_EX with BRIGHT Style - if light: - self._light |= WinStyle.BRIGHT - else: - self._light &= ~WinStyle.BRIGHT - self.set_console(on_stderr=on_stderr) - - def back(self, back=None, light=False, on_stderr=False): - if back is None: - back = self._default_back - self._back = back - # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style - if light: - self._light |= WinStyle.BRIGHT_BACKGROUND - else: - self._light &= ~WinStyle.BRIGHT_BACKGROUND - self.set_console(on_stderr=on_stderr) - - def style(self, style=None, on_stderr=False): - if style is None: - style = self._default_style - self._style = style - self.set_console(on_stderr=on_stderr) - - def set_console(self, attrs=None, on_stderr=False): - if attrs is None: - attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleTextAttribute(handle, attrs) - - def get_position(self, handle): - position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition - # Because Windows coordinates are 0-based, - # and win32.SetConsoleCursorPosition expects 1-based. - position.X += 1 - position.Y += 1 - return position - - def set_cursor_position(self, position=None, on_stderr=False): - if position is None: - # I'm not currently tracking the position, so there is no default. - # position = self.get_position() - return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleCursorPosition(handle, position) - - def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - position = self.get_position(handle) - adjusted_position = (position.Y + y, position.X + x) - win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) - - def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. 
- # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - # get the number of character cells in the current buffer - cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y - # get number of character cells before current cursor position - cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = cells_in_screen - cells_before_cursor - elif mode == 1: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_before_cursor - elif mode == 2: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_in_screen - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - if mode == 2: - # put the cursor where needed - win32.SetConsoleCursorPosition(handle, (1, 1)) - - def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - elif mode == 1: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwCursorPosition.X - elif mode == 2: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwSize.X - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - - def set_title(self, title): - win32.SetConsoleTitle(title) diff --git a/pdm/_vendor/halo/LICENSE b/pdm/_vendor/halo/LICENSE deleted file mode 100644 index 9b114e0378..0000000000 --- a/pdm/_vendor/halo/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Manraj Singh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
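The hunks that follow delete the vendored `halo` spinner together with its helper packages (`log_symbols`, `spinners`, `termcolor`). For reference, the interface PDM consumed is the one shown in the deleted sources below; this sketch assumes the upstream `halo` distribution from PyPI, which exposes the same `Halo` class as the vendored copy:

```python
import time

from halo import Halo  # upstream package; the vendored copy mirrors this API

# Context-manager form: start() on enter, stop() (clearing the line) on exit.
with Halo(text="Resolving dependencies", spinner="dots") as spinner:
    time.sleep(1)  # stand-in for real work
    spinner.succeed("Resolution complete")  # persist a success line

# Decorator form, via Halo.__call__ as defined in the deleted halo.py below.
@Halo(text="Synchronizing working set", spinner="line")
def synchronize():
    time.sleep(1)

synchronize()
```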
diff --git a/pdm/_vendor/halo/__init__.py b/pdm/_vendor/halo/__init__.py deleted file mode 100644 index e9d67d9252..0000000000 --- a/pdm/_vendor/halo/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -__author__ = 'Manraj Singh' -__email__ = 'manrajsinghgrover@gmail.com' - -import logging - -from .halo import Halo -from .halo_notebook import HaloNotebook - -logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/pdm/_vendor/halo/_utils.py b/pdm/_vendor/halo/_utils.py deleted file mode 100644 index c0b5522d0e..0000000000 --- a/pdm/_vendor/halo/_utils.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -"""Utilities for Halo library. -""" -import codecs -import platform -try: - from shutil import get_terminal_size -except ImportError: - from backports.shutil_get_terminal_size import get_terminal_size - -from pdm._vendor.termcolor import colored - - -def is_supported(): - """Check whether operating system supports main symbols or not. - - Returns - ------- - boolean - Whether operating system supports main symbols or not - """ - - os_arch = platform.system() - - if os_arch != 'Windows': - return True - - return False - - -def get_environment(): - """Get the environment in which halo is running - - Returns - ------- - str - Environment name - """ - try: - from IPython import get_ipython - except ImportError: - return 'terminal' - - try: - shell = get_ipython().__class__.__name__ - - if shell == 'ZMQInteractiveShell': # Jupyter notebook or qtconsole - return 'jupyter' - elif shell == 'TerminalInteractiveShell': # Terminal running IPython - return 'ipython' - else: - return 'terminal' # Other type (?) - - except NameError: - return 'terminal' - - -def colored_frame(frame, color): - """Color the frame with given color and returns. - - Parameters - ---------- - frame : str - Frame to be colored - color : str - Color to be applied - - Returns - ------- - str - Colored frame - """ - return colored(frame, color, attrs=['bold']) - - -def is_text_type(text): - """Check if given parameter is a string or not - - Parameters - ---------- - text : * - Parameter to be checked for text type - - Returns - ------- - bool - Whether parameter is a string or not - """ - return isinstance(text, str) - - -def decode_utf_8_text(text): - """Decode the text from utf-8 format - - Parameters - ---------- - text : str - String to be decoded - - Returns - ------- - str - Decoded string - """ - try: - return codecs.decode(text, 'utf-8') - except (TypeError, ValueError): - return text - - -def encode_utf_8_text(text): - """Encodes the text to utf-8 format - - Parameters - ---------- - text : str - String to be encoded - - Returns - ------- - str - Encoded string - """ - try: - return codecs.encode(text, 'utf-8', 'ignore') - except (TypeError, ValueError): - return text - - -def get_terminal_columns(): - """Determine the amount of available columns in the terminal - - Returns - ------- - int - Terminal width - """ - terminal_size = get_terminal_size() - - # If column size is 0 either we are not connected - # to a terminal or something else went wrong. Fallback to 80. 
- if terminal_size.columns == 0: - return 80 - else: - return terminal_size.columns diff --git a/pdm/_vendor/halo/cursor.py b/pdm/_vendor/halo/cursor.py deleted file mode 100644 index b0e54c9fa4..0000000000 --- a/pdm/_vendor/halo/cursor.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Source: https://stackoverflow.com/a/10455937/2692667 -""" - -import sys -import os - -if os.name == "nt": - import ctypes - - class _CursorInfo(ctypes.Structure): - _fields_ = [("size", ctypes.c_int), ("visible", ctypes.c_byte)] - - -def hide(stream=sys.stdout): - """Hide cursor. - Parameters - ---------- - stream: sys.stdout, Optional - Defines stream to write output to. - """ - if os.name == "nt": - ci = _CursorInfo() - handle = ctypes.windll.kernel32.GetStdHandle(-11) - ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci)) - ci.visible = False - ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci)) - elif os.name == "posix": - stream.write("\033[?25l") - stream.flush() - - -def show(stream=sys.stdout): - """Show cursor. - Parameters - ---------- - stream: sys.stdout, Optional - Defines stream to write output to. - """ - if os.name == "nt": - ci = _CursorInfo() - handle = ctypes.windll.kernel32.GetStdHandle(-11) - ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci)) - ci.visible = True - ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci)) - elif os.name == "posix": - stream.write("\033[?25h") - stream.flush() diff --git a/pdm/_vendor/halo/halo.py b/pdm/_vendor/halo/halo.py deleted file mode 100644 index e76d601900..0000000000 --- a/pdm/_vendor/halo/halo.py +++ /dev/null @@ -1,676 +0,0 @@ -# -*- coding: utf-8 -*- -# pylint: disable=unsubscriptable-object -"""Beautiful terminal spinners in Python. -""" -from __future__ import absolute_import, unicode_literals - -import atexit -import functools -import sys -import threading -import time - -import pdm._vendor.halo.cursor as cursor - -from pdm._vendor.log_symbols.symbols import LogSymbols, is_supported -from pdm._vendor.spinners.spinners import Spinners - -from pdm._vendor.halo._utils import ( - colored_frame, - decode_utf_8_text, - get_environment, - get_terminal_columns, - is_text_type, - encode_utf_8_text, -) - - -class Halo(object): - """Halo library. - Attributes - ---------- - CLEAR_LINE : str - Code to clear the line - """ - - CLEAR_LINE = "\033[K" - CLEAR_REST = "\033[J" - SPINNER_PLACEMENTS = ( - "left", - "right", - ) - - # a global list to keep all Halo instances - _instances = [] - _lock = threading.Lock() - - def __init__( - self, - text="", - color="cyan", - text_color=None, - spinner=None, - animation=None, - placement="left", - interval=-1, - enabled=True, - indent="", - stream=sys.stdout, - ): - """Constructs the Halo object. - Parameters - ---------- - text : str, optional - Text to display. - text_color : str, optional - Color of the text. - color : str, optional - Color of the text to display. - spinner : str|dict, optional - String or dictionary representing spinner. String can be one of 60+ spinners - supported. - animation: str, optional - Animation to apply if text is too large. Can be one of `bounce`, `marquee`. - Defaults to ellipses. - placement: str, optional - Side of the text to place the spinner on. Can be `left` or `right`. - Defaults to `left`. - interval : integer, optional - Interval between each frame of the spinner in milliseconds. - enabled : boolean, optional - Spinner enabled or not. - stream : io, optional - Output. 
- """ - self._color = color - self._animation = animation - - self.spinner = spinner - self.text = text - self._text_color = text_color - - self._interval = ( - int(interval) if int(interval) > 0 else self._spinner["interval"] - ) - self._stream = stream - - self.placement = placement - self._frame_index = 0 - self._text_index = 0 - self._spinner_thread = None - self._stop_spinner = None - self._spinner_id = None - self.enabled = enabled - self._stopped = False - self._content = "" - self.indent = indent - - environment = get_environment() - - def clean_up(): - """Handle cell execution""" - self.stop() - - if environment in ("ipython", "jupyter"): - from IPython import get_ipython - - ip = get_ipython() - ip.events.register("post_run_cell", clean_up) - else: # default terminal - atexit.register(clean_up) - - def __enter__(self): - """Starts the spinner on a separate thread. For use in context managers. - Returns - ------- - self - """ - return self.start() - - def __exit__(self, type, value, traceback): - """Stops the spinner. For use in context managers.""" - self.stop() - - def __call__(self, f): - """Allow the Halo object to be used as a regular function decorator.""" - - @functools.wraps(f) - def wrapped(*args, **kwargs): - with self: - return f(*args, **kwargs) - - return wrapped - - @property - def spinner(self): - """Getter for spinner property. - Returns - ------- - dict - spinner value - """ - return self._spinner - - @spinner.setter - def spinner(self, spinner=None): - """Setter for spinner property. - Parameters - ---------- - spinner : dict, str - Defines the spinner value with frame and interval - """ - - self._spinner = self._get_spinner(spinner) - self._frame_index = 0 - self._text_index = 0 - - @property - def text(self): - """Getter for text property. - Returns - ------- - str - text value - """ - return self._text["original"] - - @text.setter - def text(self, text): - """Setter for text property. - Parameters - ---------- - text : str - Defines the text value for spinner - """ - self._text = self._get_text(text) - - @property - def text_color(self): - """Getter for text color property. - Returns - ------- - str - text color value - """ - return self._text_color - - @text_color.setter - def text_color(self, text_color): - """Setter for text color property. - Parameters - ---------- - text_color : str - Defines the text color value for spinner - """ - self._text_color = text_color - - @property - def color(self): - """Getter for color property. - Returns - ------- - str - color value - """ - return self._color - - @color.setter - def color(self, color): - """Setter for color property. - Parameters - ---------- - color : str - Defines the color value for spinner - """ - self._color = color - - @property - def placement(self): - """Getter for placement property. - Returns - ------- - str - spinner placement - """ - return self._placement - - @placement.setter - def placement(self, placement): - """Setter for placement property. - Parameters - ---------- - placement: str - Defines the placement of the spinner - """ - if placement not in self.SPINNER_PLACEMENTS: - raise ValueError( - "Unknown spinner placement '{0}', available are {1}".format( - placement, self.SPINNER_PLACEMENTS - ) - ) - self._placement = placement - - @property - def spinner_id(self): - """Getter for spinner id - Returns - ------- - str - Spinner id value - """ - return self._spinner_id - - @property - def animation(self): - """Getter for animation property. 
- Returns - ------- - str - Spinner animation - """ - return self._animation - - @animation.setter - def animation(self, animation): - """Setter for animation property. - Parameters - ---------- - animation: str - Defines the animation of the spinner - """ - self._animation = animation - self._text = self._get_text(self._text["original"]) - - def _check_stream(self): - """Returns whether the stream is open, and if applicable, writable - Returns - ------- - bool - Whether the stream is open - """ - if self._stream.closed: - return False - - try: - # Attribute access kept separate from invocation, to avoid - # swallowing AttributeErrors from the call which should bubble up. - check_stream_writable = self._stream.writable - except AttributeError: - pass - else: - return check_stream_writable() - - return True - - def _pop_stream_content_until_self(self, clear_self=False): - """Move cursor to the end of this instance's content and erase all contents - following it. - Parameters - ---------- - clear_self: bool - If equals True, the content of current line will also get cleared - Returns - ------- - str - The content of stream following this instance. - """ - erased_content = [] - lines_to_erase = self._content.count("\n") if clear_self else 0 - for inst in Halo._instances[::-1]: - if inst is self: - break - erased_content.append(inst._content) - lines_to_erase += inst._content.count("\n") - - if lines_to_erase > 0: - # Move cursor up n lines - self._write_stream("\033[{}A".format(lines_to_erase)) - # Erase rest content - self._write_stream(self.CLEAR_REST) - return "".join(reversed(erased_content)) - - def _write_stream(self, s): - """Write to the stream, if writable - Parameters - ---------- - s : str - Characters to write to the stream - """ - if self._check_stream(): - self._stream.write(s) - - def _write(self, s, overwrite=False): - """Write to the stream and keep following lines unchanged. - Parameters - ---------- - s : str - Characters to write to the stream - overwrite: bool - If set to True, overwrite the content of current instance. - """ - if s.startswith("\r"): - s = f"\r{self.indent}{s[1:]}" - else: - s = f"{self.indent}{s}" - with Halo._lock: - erased_content = self._pop_stream_content_until_self(overwrite) - self._write_stream(s) - # Write back following lines - self._write_stream(erased_content) - self._content = s if overwrite else self._content + s - - def _hide_cursor(self): - """Disable the user's blinking cursor""" - if self._check_stream() and self._stream.isatty(): - cursor.hide(stream=self._stream) - - def _show_cursor(self): - """Re-enable the user's blinking cursor""" - if self._check_stream() and self._stream.isatty(): - cursor.show(stream=self._stream) - - def _get_spinner(self, spinner): - """Extracts spinner value from options and returns value - containing spinner frames and interval, defaults to 'dots' spinner. 
- Parameters - ---------- - spinner : dict, str - Contains spinner value or type of spinner to be used - Returns - ------- - dict - Contains frames and interval defining spinner - """ - default_spinner = Spinners["dots"].value - - if spinner and type(spinner) == dict: - return spinner - - if is_supported(): - if all([is_text_type(spinner), spinner in Spinners.__members__]): - return Spinners[spinner].value - else: - return default_spinner - else: - return Spinners["line"].value - - def _get_text(self, text): - """Creates frames based on the selected animation - Returns - ------- - self - """ - animation = self._animation - stripped_text = text.strip() - - # Check which frame of the animation is the widest - max_spinner_length = max([len(i) for i in self._spinner["frames"]]) - - # Subtract to the current terminal size the max spinner length - # (-1 to leave room for the extra space between spinner and text) - terminal_width = get_terminal_columns() - max_spinner_length - 1 - text_length = len(stripped_text) - - frames = [] - - if terminal_width < text_length and animation: - if animation == "bounce": - """ - Make the text bounce back and forth - """ - for x in range(0, text_length - terminal_width + 1): - frames.append(stripped_text[x : terminal_width + x]) - frames.extend(list(reversed(frames))) - elif "marquee": - """ - Make the text scroll like a marquee - """ - stripped_text = stripped_text + " " + stripped_text[:terminal_width] - for x in range(0, text_length + 1): - frames.append(stripped_text[x : terminal_width + x]) - elif terminal_width < text_length and not animation: - # Add ellipsis if text is larger than terminal width and no animation was specified - frames = [stripped_text[: terminal_width - 6] + " (...)"] - else: - frames = [stripped_text] - - return {"original": text, "frames": frames} - - def clear(self): - """Clears the line and returns cursor to the start. - of line - Returns - ------- - self - """ - with Halo._lock: - erased_content = self._pop_stream_content_until_self(True) - self._content = "" - self._write_stream(erased_content) - return self - - def _render_frame(self): - """Renders the frame on the line after clearing it.""" - if not self.enabled: - # in case we're disabled or stream is closed while still rendering, - # we render the frame and increment the frame index, so the proper - # frame is rendered if we're re-enabled or the stream opens again. - return - - frame = self.frame() - output = "\r{}\n".format(frame) - try: - self._write(output, True) - except UnicodeEncodeError: - self._write(encode_utf_8_text(output), True) - - def render(self): - """Runs the render until thread flag is set. 
- Returns - ------- - self - """ - while not self._stop_spinner.is_set(): - self._render_frame() - time.sleep(0.001 * self._interval) - - return self - - def frame(self): - """Builds and returns the frame to be rendered - Returns - ------- - self - """ - frames = self._spinner["frames"] - frame = frames[self._frame_index] - - if self._color: - frame = colored_frame(frame, self._color) - - self._frame_index += 1 - self._frame_index = self._frame_index % len(frames) - - text_frame = self.text_frame() - return "{0} {1}".format( - *[ - (text_frame, frame) - if self._placement == "right" - else (frame, text_frame) - ][0] - ) - - def text_frame(self): - """Builds and returns the text frame to be rendered - Returns - ------- - self - """ - if len(self._text["frames"]) == 1: - if self._text_color: - return colored_frame(self._text["frames"][0], self._text_color) - - # Return first frame (can't return original text because at this point it might be ellipsed) - return self._text["frames"][0] - - frames = self._text["frames"] - frame = frames[self._text_index] - - self._text_index += 1 - self._text_index = self._text_index % len(frames) - - if self._text_color: - return colored_frame(frame, self._text_color) - - return frame - - def start(self, text=None): - """Starts the spinner on a separate thread. - Parameters - ---------- - text : None, optional - Text to be used alongside spinner - Returns - ------- - self - """ - if text is not None: - self.text = text - - if self._spinner_id is not None: - return self - - if not (self.enabled and self._check_stream()): - return self - - # Clear all stale Halo instances created before - # Check against Halo._instances instead of self._instances - # to avoid possible overriding in subclasses. - if all(inst._stopped for inst in Halo._instances): - Halo._instances[:] = [] - # Allow for calling start() multiple times - if self not in Halo._instances: - Halo._instances.append(self) - self._hide_cursor() - - self._stop_spinner = threading.Event() - self._spinner_thread = threading.Thread(target=self.render) - self._spinner_thread.setDaemon(True) - self._render_frame() - self._spinner_id = self._spinner_thread.name - self._spinner_thread.start() - self._stopped = False - - return self - - def stop(self): - """Stops the spinner and clears the line. - Returns - ------- - self - """ - if self._spinner_thread and self._spinner_thread.is_alive(): - self._stop_spinner.set() - self._spinner_thread.join() - - if self._stopped: - return - - if self.enabled: - self.clear() - - self._frame_index = 0 - self._spinner_id = None - self._show_cursor() - self._stopped = True - - return self - - def succeed(self, text=None): - """Shows and persists success symbol and text and exits. - Parameters - ---------- - text : None, optional - Text to be shown alongside success symbol. - Returns - ------- - self - """ - return self.stop_and_persist(symbol=LogSymbols.SUCCESS.value, text=text) - - def fail(self, text=None): - """Shows and persists fail symbol and text and exits. - Parameters - ---------- - text : None, optional - Text to be shown alongside fail symbol. - Returns - ------- - self - """ - return self.stop_and_persist(symbol=LogSymbols.ERROR.value, text=text) - - def warn(self, text=None): - """Shows and persists warn symbol and text and exits. - Parameters - ---------- - text : None, optional - Text to be shown alongside warn symbol. 
- Returns - ------- - self - """ - return self.stop_and_persist(symbol=LogSymbols.WARNING.value, text=text) - - def info(self, text=None): - """Shows and persists info symbol and text and exits. - Parameters - ---------- - text : None, optional - Text to be shown alongside info symbol. - Returns - ------- - self - """ - return self.stop_and_persist(symbol=LogSymbols.INFO.value, text=text) - - def stop_and_persist(self, symbol=" ", text=None): - """Stops the spinner and persists the final frame to be shown. - Parameters - ---------- - symbol : str, optional - Symbol to be shown in final frame - text: str, optional - Text to be shown in final frame - - Returns - ------- - self - """ - if not self.enabled: - return self - - symbol = decode_utf_8_text(symbol) - - if text is not None: - text = decode_utf_8_text(text) - else: - text = self._text["original"] - - text = text.strip() - - if self._text_color: - text = colored_frame(text, self._text_color) - - self.stop() - - output = "{0} {1}\n".format( - *[(text, symbol) if self._placement == "right" else (symbol, text)][0] - ) - - try: - self._write(output) - except UnicodeEncodeError: - self._write(encode_utf_8_text(output)) - - return self diff --git a/pdm/_vendor/halo/halo_notebook.py b/pdm/_vendor/halo/halo_notebook.py deleted file mode 100644 index de595ff9c6..0000000000 --- a/pdm/_vendor/halo/halo_notebook.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import absolute_import, print_function, unicode_literals - -import sys -import threading - -import pdm._vendor.halo.cursor as cursor - -from pdm._vendor.halo import Halo -from pdm._vendor.halo._utils import colored_frame, decode_utf_8_text - - -class HaloNotebook(Halo): - def __init__( - self, - text="", - color="cyan", - text_color=None, - spinner=None, - placement="left", - animation=None, - interval=-1, - enabled=True, - stream=sys.stdout, - ): - super(HaloNotebook, self).__init__( - text=text, - color=color, - text_color=text_color, - spinner=spinner, - placement=placement, - animation=animation, - interval=interval, - enabled=enabled, - stream=stream, - ) - self.output = self._make_output_widget() - - def _make_output_widget(self): - from ipywidgets.widgets import Output - - return Output() - - # TODO: using property and setter - def _output(self, text=""): - return ({"name": "stdout", "output_type": "stream", "text": text},) - - def clear(self): - if not self.enabled: - return self - - with self.output: - self.output.outputs += self._output("\r") - self.output.outputs += self._output(self.CLEAR_LINE) - - self.output.outputs = self._output() - return self - - def _render_frame(self): - frame = self.frame() - output = "\r{}".format(frame) - with self.output: - self.output.outputs += self._output(output) - - def start(self, text=None): - if text is not None: - self.text = text - - if not self.enabled or self._spinner_id is not None: - return self - - if self._stream.isatty(): - cursor.hide() - - self.output = self._make_output_widget() - from IPython.display import display - - display(self.output) - self._stop_spinner = threading.Event() - self._spinner_thread = threading.Thread(target=self.render) - self._spinner_thread.setDaemon(True) - self._render_frame() - self._spinner_id = self._spinner_thread.name - self._spinner_thread.start() - - return self - - def stop_and_persist(self, symbol=" ", text=None): - """Stops the spinner and persists the final frame to be shown. 
- Parameters - ---------- - symbol : str, optional - Symbol to be shown in final frame - text: str, optional - Text to be shown in final frame - - Returns - ------- - self - """ - if not self.enabled: - return self - - symbol = decode_utf_8_text(symbol) - - if text is not None: - text = decode_utf_8_text(text) - else: - text = self._text["original"] - - text = text.strip() - - if self._text_color: - text = colored_frame(text, self._text_color) - - self.stop() - - output = "\r{} {}\n".format( - *[(text, symbol) if self._placement == "right" else (symbol, text)][0] - ) - - with self.output: - self.output.outputs = self._output(output) diff --git a/pdm/_vendor/log_symbols/LICENSE b/pdm/_vendor/log_symbols/LICENSE deleted file mode 100644 index 9b114e0378..0000000000 --- a/pdm/_vendor/log_symbols/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Manraj Singh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/pdm/_vendor/log_symbols/__init__.py b/pdm/_vendor/log_symbols/__init__.py deleted file mode 100644 index 9e63b02103..0000000000 --- a/pdm/_vendor/log_symbols/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -__author__ = 'Manraj Singh' -__email__ = 'manrajsinghgrover@gmail.com' - -from .symbols import LogSymbols diff --git a/pdm/_vendor/log_symbols/symbols.py b/pdm/_vendor/log_symbols/symbols.py deleted file mode 100644 index 3ed2ef0eeb..0000000000 --- a/pdm/_vendor/log_symbols/symbols.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -"""Provide log symbols for various log levels.""" -import codecs -import locale -import os -import sys - -from enum import Enum -from pdm._vendor.colorama import init, deinit, Fore - -init(autoreset=True) - -_MAIN = { - 'info': 'ℹ', - 'success': '✔', - 'warning': '⚠', - 'error': '✖' -} - -_FALLBACKS = { - 'info': '¡', - 'success': 'v', - 'warning': '!!', - 'error': '×' -} - - -def is_supported(): - """Check whether operating system supports main symbols or not. - - Returns - ------- - boolean - Whether operating system supports main symbols or not - """ - if os.getenv("DISABLE_UNICODE_OUTPUT"): - return False - encoding = getattr(sys.stdout, "encoding") - if encoding is None: - encoding = locale.getpreferredencoding(False) - - try: - encoding = codecs.lookup(encoding).name - except Exception: - encoding = "utf-8" - return encoding == "utf-8" - - -_SYMBOLS = _MAIN if is_supported() else _FALLBACKS - - -class LogSymbols(Enum): # pylint: disable=too-few-public-methods - """LogSymbol enum class. 
- - Attributes - ---------- - ERROR : str - Colored error symbol - INFO : str - Colored info symbol - SUCCESS : str - Colored success symbol - WARNING : str - Colored warning symbol - """ - - INFO = Fore.BLUE + _SYMBOLS['info'] + Fore.RESET - SUCCESS = Fore.GREEN + _SYMBOLS['success'] + Fore.RESET - WARNING = Fore.YELLOW + _SYMBOLS['warning'] + Fore.RESET - ERROR = Fore.RED + _SYMBOLS['error'] + Fore.RESET - -deinit() diff --git a/pdm/_vendor/spinners/LICENSE b/pdm/_vendor/spinners/LICENSE deleted file mode 100644 index 9b114e0378..0000000000 --- a/pdm/_vendor/spinners/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Manraj Singh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/pdm/_vendor/spinners/__init__.py b/pdm/_vendor/spinners/__init__.py deleted file mode 100644 index 969320e1f4..0000000000 --- a/pdm/_vendor/spinners/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -__author__ = 'Manraj Singh' -__email__ = 'manrajsinghgrover@gmail.com' - -from .spinners import Spinners diff --git a/pdm/_vendor/spinners/spinners.py b/pdm/_vendor/spinners/spinners.py deleted file mode 100644 index 7eee4f1a51..0000000000 --- a/pdm/_vendor/spinners/spinners.py +++ /dev/null @@ -1,943 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Python wrapper for beautiful terminal spinner library. - -Spinners are from: -* cli-spinners: - MIT License - - Copyright (c) Sindre Sorhus (sindresorhus.com) - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights to - use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of - the Software, and to permit persons to whom the Software is furnished to do so, - subject to the following conditions: - - The above copyright notice and this permission notice shall be included - in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, - INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR - PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE - FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - IN THE SOFTWARE. 
-""" -from __future__ import unicode_literals - -from enum import Enum - -Spinners = Enum('Spinners', { - "dots": { - "interval": 80, - "frames": [ - "⠋", - "⠙", - "⠹", - "⠸", - "⠼", - "⠴", - "⠦", - "⠧", - "⠇", - "⠏" - ] - }, - "dots2": { - "interval": 80, - "frames": [ - "⣾", - "⣽", - "⣻", - "⢿", - "⡿", - "⣟", - "⣯", - "⣷" - ] - }, - "dots3": { - "interval": 80, - "frames": [ - "⠋", - "⠙", - "⠚", - "⠞", - "⠖", - "⠦", - "⠴", - "⠲", - "⠳", - "⠓" - ] - }, - "dots4": { - "interval": 80, - "frames": [ - "⠄", - "⠆", - "⠇", - "⠋", - "⠙", - "⠸", - "⠰", - "⠠", - "⠰", - "⠸", - "⠙", - "⠋", - "⠇", - "⠆" - ] - }, - "dots5": { - "interval": 80, - "frames": [ - "⠋", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋" - ] - }, - "dots6": { - "interval": 80, - "frames": [ - "⠁", - "⠉", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠤", - "⠄", - "⠄", - "⠤", - "⠴", - "⠲", - "⠒", - "⠂", - "⠂", - "⠒", - "⠚", - "⠙", - "⠉", - "⠁" - ] - }, - "dots7": { - "interval": 80, - "frames": [ - "⠈", - "⠉", - "⠋", - "⠓", - "⠒", - "⠐", - "⠐", - "⠒", - "⠖", - "⠦", - "⠤", - "⠠", - "⠠", - "⠤", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋", - "⠉", - "⠈" - ] - }, - "dots8": { - "interval": 80, - "frames": [ - "⠁", - "⠁", - "⠉", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠤", - "⠄", - "⠄", - "⠤", - "⠠", - "⠠", - "⠤", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋", - "⠉", - "⠈", - "⠈" - ] - }, - "dots9": { - "interval": 80, - "frames": [ - "⢹", - "⢺", - "⢼", - "⣸", - "⣇", - "⡧", - "⡗", - "⡏" - ] - }, - "dots10": { - "interval": 80, - "frames": [ - "⢄", - "⢂", - "⢁", - "⡁", - "⡈", - "⡐", - "⡠" - ] - }, - "dots11": { - "interval": 100, - "frames": [ - "⠁", - "⠂", - "⠄", - "⡀", - "⢀", - "⠠", - "⠐", - "⠈" - ] - }, - "dots12": { - "interval": 80, - "frames": [ - "⢀⠀", - "⡀⠀", - "⠄⠀", - "⢂⠀", - "⡂⠀", - "⠅⠀", - "⢃⠀", - "⡃⠀", - "⠍⠀", - "⢋⠀", - "⡋⠀", - "⠍⠁", - "⢋⠁", - "⡋⠁", - "⠍⠉", - "⠋⠉", - "⠋⠉", - "⠉⠙", - "⠉⠙", - "⠉⠩", - "⠈⢙", - "⠈⡙", - "⢈⠩", - "⡀⢙", - "⠄⡙", - "⢂⠩", - "⡂⢘", - "⠅⡘", - "⢃⠨", - "⡃⢐", - "⠍⡐", - "⢋⠠", - "⡋⢀", - "⠍⡁", - "⢋⠁", - "⡋⠁", - "⠍⠉", - "⠋⠉", - "⠋⠉", - "⠉⠙", - "⠉⠙", - "⠉⠩", - "⠈⢙", - "⠈⡙", - "⠈⠩", - "⠀⢙", - "⠀⡙", - "⠀⠩", - "⠀⢘", - "⠀⡘", - "⠀⠨", - "⠀⢐", - "⠀⡐", - "⠀⠠", - "⠀⢀", - "⠀⡀" - ] - }, - "line": { - "interval": 130, - "frames": [ - "-", - "\\", - "|", - "/" - ] - }, - "line2": { - "interval": 100, - "frames": [ - "⠂", - "-", - "–", - "—", - "–", - "-" - ] - }, - "pipe": { - "interval": 100, - "frames": [ - "┤", - "┘", - "┴", - "└", - "├", - "┌", - "┬", - "┐" - ] - }, - "simpleDots": { - "interval": 400, - "frames": [ - ". ", - ".. ", - "...", - " " - ] - }, - "simpleDotsScrolling": { - "interval": 200, - "frames": [ - ". ", - ".. 
", - "...", - " ..", - " .", - " " - ] - }, - "star": { - "interval": 70, - "frames": [ - "✶", - "✸", - "✹", - "✺", - "✹", - "✷" - ] - }, - "star2": { - "interval": 80, - "frames": [ - "+", - "x", - "*" - ] - }, - "flip": { - "interval": 70, - "frames": [ - "_", - "_", - "_", - "-", - "`", - "`", - "'", - "´", - "-", - "_", - "_", - "_" - ] - }, - "hamburger": { - "interval": 100, - "frames": [ - "☱", - "☲", - "☴" - ] - }, - "growVertical": { - "interval": 120, - "frames": [ - "▁", - "▃", - "▄", - "▅", - "▆", - "▇", - "▆", - "▅", - "▄", - "▃" - ] - }, - "growHorizontal": { - "interval": 120, - "frames": [ - "▏", - "▎", - "▍", - "▌", - "▋", - "▊", - "▉", - "▊", - "▋", - "▌", - "▍", - "▎" - ] - }, - "balloon": { - "interval": 140, - "frames": [ - " ", - ".", - "o", - "O", - "@", - "*", - " " - ] - }, - "balloon2": { - "interval": 120, - "frames": [ - ".", - "o", - "O", - "°", - "O", - "o", - "." - ] - }, - "noise": { - "interval": 100, - "frames": [ - "▓", - "▒", - "░" - ] - }, - "bounce": { - "interval": 120, - "frames": [ - "⠁", - "⠂", - "⠄", - "⠂" - ] - }, - "boxBounce": { - "interval": 120, - "frames": [ - "▖", - "▘", - "▝", - "▗" - ] - }, - "boxBounce2": { - "interval": 100, - "frames": [ - "▌", - "▀", - "▐", - "▄" - ] - }, - "triangle": { - "interval": 50, - "frames": [ - "◢", - "◣", - "◤", - "◥" - ] - }, - "arc": { - "interval": 100, - "frames": [ - "◜", - "◠", - "◝", - "◞", - "◡", - "◟" - ] - }, - "circle": { - "interval": 120, - "frames": [ - "◡", - "⊙", - "◠" - ] - }, - "squareCorners": { - "interval": 180, - "frames": [ - "◰", - "◳", - "◲", - "◱" - ] - }, - "circleQuarters": { - "interval": 120, - "frames": [ - "◴", - "◷", - "◶", - "◵" - ] - }, - "circleHalves": { - "interval": 50, - "frames": [ - "◐", - "◓", - "◑", - "◒" - ] - }, - "squish": { - "interval": 100, - "frames": [ - "╫", - "╪" - ] - }, - "toggle": { - "interval": 250, - "frames": [ - "⊶", - "⊷" - ] - }, - "toggle2": { - "interval": 80, - "frames": [ - "▫", - "▪" - ] - }, - "toggle3": { - "interval": 120, - "frames": [ - "□", - "■" - ] - }, - "toggle4": { - "interval": 100, - "frames": [ - "■", - "□", - "▪", - "▫" - ] - }, - "toggle5": { - "interval": 100, - "frames": [ - "▮", - "▯" - ] - }, - "toggle6": { - "interval": 300, - "frames": [ - "ဝ", - "၀" - ] - }, - "toggle7": { - "interval": 80, - "frames": [ - "⦾", - "⦿" - ] - }, - "toggle8": { - "interval": 100, - "frames": [ - "◍", - "◌" - ] - }, - "toggle9": { - "interval": 100, - "frames": [ - "◉", - "◎" - ] - }, - "toggle10": { - "interval": 100, - "frames": [ - "㊂", - "㊀", - "㊁" - ] - }, - "toggle11": { - "interval": 50, - "frames": [ - "⧇", - "⧆" - ] - }, - "toggle12": { - "interval": 120, - "frames": [ - "☗", - "☖" - ] - }, - "toggle13": { - "interval": 80, - "frames": [ - "=", - "*", - "-" - ] - }, - "arrow": { - "interval": 100, - "frames": [ - "←", - "↖", - "↑", - "↗", - "→", - "↘", - "↓", - "↙" - ] - }, - "arrow2": { - "interval": 80, - "frames": [ - "⬆️ ", - "↗️ ", - "➡️ ", - "↘️ ", - "⬇️ ", - "↙️ ", - "⬅️ ", - "↖️ " - ] - }, - "arrow3": { - "interval": 120, - "frames": [ - "▹▹▹▹▹", - "▸▹▹▹▹", - "▹▸▹▹▹", - "▹▹▸▹▹", - "▹▹▹▸▹", - "▹▹▹▹▸" - ] - }, - "bouncingBar": { - "interval": 80, - "frames": [ - "[ ]", - "[= ]", - "[== ]", - "[=== ]", - "[ ===]", - "[ ==]", - "[ =]", - "[ ]", - "[ =]", - "[ ==]", - "[ ===]", - "[====]", - "[=== ]", - "[== ]", - "[= ]" - ] - }, - "bouncingBall": { - "interval": 80, - "frames": [ - "( ● )", - "( ● )", - "( ● )", - "( ● )", - "( ●)", - "( ● )", - "( ● )", - "( ● )", - "( ● )", - "(● )" - ] - }, - "smiley": { - "interval": 200, 
- "frames": [ - "😄 ", - "😝 " - ] - }, - "monkey": { - "interval": 300, - "frames": [ - "🙈 ", - "🙈 ", - "🙉 ", - "🙊 " - ] - }, - "hearts": { - "interval": 100, - "frames": [ - "💛 ", - "💙 ", - "💜 ", - "💚 ", - "❤️ " - ] - }, - "clock": { - "interval": 100, - "frames": [ - "🕛 ", - "🕐 ", - "🕑 ", - "🕒 ", - "🕓 ", - "🕔 ", - "🕕 ", - "🕖 ", - "🕗 ", - "🕘 ", - "🕙 ", - "🕚 " - ] - }, - "earth": { - "interval": 180, - "frames": [ - "🌍 ", - "🌎 ", - "🌏 " - ] - }, - "moon": { - "interval": 80, - "frames": [ - "🌑 ", - "🌒 ", - "🌓 ", - "🌔 ", - "🌕 ", - "🌖 ", - "🌗 ", - "🌘 " - ] - }, - "runner": { - "interval": 140, - "frames": [ - "🚶 ", - "🏃 " - ] - }, - "pong": { - "interval": 80, - "frames": [ - "▐⠂ ▌", - "▐⠈ ▌", - "▐ ⠂ ▌", - "▐ ⠠ ▌", - "▐ ⡀ ▌", - "▐ ⠠ ▌", - "▐ ⠂ ▌", - "▐ ⠈ ▌", - "▐ ⠂ ▌", - "▐ ⠠ ▌", - "▐ ⡀ ▌", - "▐ ⠠ ▌", - "▐ ⠂ ▌", - "▐ ⠈ ▌", - "▐ ⠂▌", - "▐ ⠠▌", - "▐ ⡀▌", - "▐ ⠠ ▌", - "▐ ⠂ ▌", - "▐ ⠈ ▌", - "▐ ⠂ ▌", - "▐ ⠠ ▌", - "▐ ⡀ ▌", - "▐ ⠠ ▌", - "▐ ⠂ ▌", - "▐ ⠈ ▌", - "▐ ⠂ ▌", - "▐ ⠠ ▌", - "▐ ⡀ ▌", - "▐⠠ ▌" - ] - }, - "shark": { - "interval": 120, - "frames": [ - "▐|\\____________▌", - "▐_|\\___________▌", - "▐__|\\__________▌", - "▐___|\\_________▌", - "▐____|\\________▌", - "▐_____|\\_______▌", - "▐______|\\______▌", - "▐_______|\\_____▌", - "▐________|\\____▌", - "▐_________|\\___▌", - "▐__________|\\__▌", - "▐___________|\\_▌", - "▐____________|\\▌", - "▐____________/|▌", - "▐___________/|_▌", - "▐__________/|__▌", - "▐_________/|___▌", - "▐________/|____▌", - "▐_______/|_____▌", - "▐______/|______▌", - "▐_____/|_______▌", - "▐____/|________▌", - "▐___/|_________▌", - "▐__/|__________▌", - "▐_/|___________▌", - "▐/|____________▌" - ] - }, - "dqpb": { - "interval": 100, - "frames": [ - "d", - "q", - "p", - "b" - ] - }, - "weather": { - "interval": 100, - "frames": [ - "☀️ ", - "☀️ ", - "☀️ ", - "🌤 ", - "⛅️ ", - "🌥 ", - "☁️ ", - "🌧 ", - "🌨 ", - "🌧 ", - "🌨 ", - "🌧 ", - "🌨 ", - "⛈ ", - "🌨 ", - "🌧 ", - "🌨 ", - "☁️ ", - "🌥 ", - "⛅️ ", - "🌤 ", - "☀️ ", - "☀️ " - ] - }, - "christmas": { - "interval": 400, - "frames": [ - "🌲", - "🎄" - ] - }, - "grenade": { - "interval": 80, - "frames": [ - "، ", - "′ ", - " ´ ", - " ‾ ", - " ⸌", - " ⸊", - " |", - " ⁎", - " ⁕", - " ෴ ", - " ⁓", - " ", - " ", - " " - ] - }, - "point": { - "interval": 125, - "frames": [ - "∙∙∙", - "●∙∙", - "∙●∙", - "∙∙●", - "∙∙∙" - ] - }, - "layer": { - "interval": 150, - "frames": [ - "-", - "=", - "≡" - ] - } -}) diff --git a/pdm/_vendor/termcolor.COPYING.txt b/pdm/_vendor/termcolor.COPYING.txt deleted file mode 100644 index d5df97633a..0000000000 --- a/pdm/_vendor/termcolor.COPYING.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008-2011 Volvox Development Team - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - diff --git a/pdm/_vendor/termcolor.py b/pdm/_vendor/termcolor.py deleted file mode 100644 index f11b824b28..0000000000 --- a/pdm/_vendor/termcolor.py +++ /dev/null @@ -1,168 +0,0 @@ -# coding: utf-8 -# Copyright (c) 2008-2011 Volvox Development Team -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# -# Author: Konstantin Lepa <konstantin.lepa@gmail.com> - -"""ANSII Color formatting for output in terminal.""" - -from __future__ import print_function -import os - - -__ALL__ = [ 'colored', 'cprint' ] - -VERSION = (1, 1, 0) - -ATTRIBUTES = dict( - list(zip([ - 'bold', - 'dark', - '', - 'underline', - 'blink', - '', - 'reverse', - 'concealed' - ], - list(range(1, 9)) - )) - ) -del ATTRIBUTES[''] - - -HIGHLIGHTS = dict( - list(zip([ - 'on_grey', - 'on_red', - 'on_green', - 'on_yellow', - 'on_blue', - 'on_magenta', - 'on_cyan', - 'on_white' - ], - list(range(40, 48)) - )) - ) - - -COLORS = dict( - list(zip([ - 'grey', - 'red', - 'green', - 'yellow', - 'blue', - 'magenta', - 'cyan', - 'white', - ], - list(range(30, 38)) - )) - ) - - -RESET = '\033[0m' - - -def colored(text, color=None, on_color=None, attrs=None): - """Colorize text. - - Available text colors: - red, green, yellow, blue, magenta, cyan, white. - - Available text highlights: - on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white. - - Available attributes: - bold, dark, underline, blink, reverse, concealed. - - Example: - colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink']) - colored('Hello, World!', 'green') - """ - if os.getenv('ANSI_COLORS_DISABLED') is None: - fmt_str = '\033[%dm%s' - if color is not None: - text = fmt_str % (COLORS[color], text) - - if on_color is not None: - text = fmt_str % (HIGHLIGHTS[on_color], text) - - if attrs is not None: - for attr in attrs: - text = fmt_str % (ATTRIBUTES[attr], text) - - text += RESET - return text - - -def cprint(text, color=None, on_color=None, attrs=None, **kwargs): - """Print colorize text. - - It accepts arguments of print function.
- """ - - print((colored(text, color, on_color, attrs)), **kwargs) - - -if __name__ == '__main__': - print('Current terminal type: %s' % os.getenv('TERM')) - print('Test basic colors:') - cprint('Grey color', 'grey') - cprint('Red color', 'red') - cprint('Green color', 'green') - cprint('Yellow color', 'yellow') - cprint('Blue color', 'blue') - cprint('Magenta color', 'magenta') - cprint('Cyan color', 'cyan') - cprint('White color', 'white') - print(('-' * 78)) - - print('Test highlights:') - cprint('On grey color', on_color='on_grey') - cprint('On red color', on_color='on_red') - cprint('On green color', on_color='on_green') - cprint('On yellow color', on_color='on_yellow') - cprint('On blue color', on_color='on_blue') - cprint('On magenta color', on_color='on_magenta') - cprint('On cyan color', on_color='on_cyan') - cprint('On white color', color='grey', on_color='on_white') - print('-' * 78) - - print('Test attributes:') - cprint('Bold grey color', 'grey', attrs=['bold']) - cprint('Dark red color', 'red', attrs=['dark']) - cprint('Underline green color', 'green', attrs=['underline']) - cprint('Blink yellow color', 'yellow', attrs=['blink']) - cprint('Reversed blue color', 'blue', attrs=['reverse']) - cprint('Concealed Magenta color', 'magenta', attrs=['concealed']) - cprint('Bold underline reverse cyan color', 'cyan', - attrs=['bold', 'underline', 'reverse']) - cprint('Dark blink concealed white color', 'white', - attrs=['dark', 'blink', 'concealed']) - print(('-' * 78)) - - print('Test mixing:') - cprint('Underline red on grey color', 'red', 'on_grey', - ['underline']) - cprint('Reversed green on red color', 'green', 'on_red', ['reverse']) - diff --git a/pdm/_vendor/vendors.txt b/pdm/_vendor/vendors.txt deleted file mode 100644 index 4f0e0a01c9..0000000000 --- a/pdm/_vendor/vendors.txt +++ /dev/null @@ -1,5 +0,0 @@ -halo==0.0.31 -log_symbols==0.0.14 -spinners==0.0.24 -termcolor==1.1.0 -colorama==0.4.4 diff --git a/pdm/builders/base.py b/pdm/builders/base.py index 4f9d2ffafe..36db1a86a0 100644 --- a/pdm/builders/base.py +++ b/pdm/builders/base.py @@ -11,9 +11,9 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Iterable, Mapping -import tomli from pep517.wrappers import Pep517HookCaller +from pdm.compat import tomllib from pdm.exceptions import BuildError from pdm.models.in_process import get_sys_config_paths from pdm.models.requirements import parse_requirement @@ -74,7 +74,7 @@ def log_subprocessor( env = os.environ.copy() if extra_environ: env.update(extra_environ) - outstream = LoggerWrapper(logger, logging.DEBUG) + outstream = LoggerWrapper(logger, logging.INFO) try: subprocess.check_call( cmd, @@ -171,10 +171,10 @@ def __init__(self, src_dir: str | Path, environment: Environment) -> None: self.executable = self._env.interpreter.executable.as_posix() self.src_dir = src_dir self.isolated = environment.project.config["build_isolation"] - logger.debug("Preparing isolated env for PEP 517 build...") + logger.info("Preparing isolated env for PEP 517 build...") try: with open(os.path.join(src_dir, "pyproject.toml"), "rb") as f: - spec = tomli.load(f) + spec = tomllib.load(f) except FileNotFoundError: spec = {} except Exception as e: @@ -316,11 +316,10 @@ def ensure_setup_py(self) -> str: from pdm.project.metadata import MutableMetadata builder = Builder(self.src_dir) - if os.path.exists(os.path.join(self.src_dir, "pyproject.toml")): + project_file = os.path.join(self.src_dir, "pyproject.toml") + if os.path.exists(project_file): try: - builder._meta = MutableMetadata( - 
os.path.join(self.src_dir, "pyproject.toml") - ) + builder._meta = MutableMetadata.from_file(project_file) except ValueError: builder._meta = None return builder.ensure_setup_py().as_posix() diff --git a/pdm/cli/actions.py b/pdm/cli/actions.py index 90bd89cd57..893345921b 100644 --- a/pdm/cli/actions.py +++ b/pdm/cli/actions.py @@ -12,14 +12,15 @@ from itertools import chain from typing import Collection, Iterable, Mapping, Sequence, cast -import click import tomlkit from resolvelib.reporters import BaseReporter from resolvelib.resolvers import ResolutionImpossible, ResolutionTooDeep, Resolver -from pdm import signals, termui +from pdm import termui +from pdm.cli.hooks import HookManager from pdm.cli.utils import ( check_project_file, + fetch_hashes, find_importable_files, format_lockfile, format_resolution_impossible, @@ -56,8 +57,10 @@ def do_lock( requirements: list[Requirement] | None = None, dry_run: bool = False, refresh: bool = False, + hooks: HookManager | None = None, ) -> dict[str, Candidate]: """Performs the locking process and update lockfile.""" + hooks = hooks or HookManager(project) check_project_file(project) if refresh: locked_repo = project.locked_repository @@ -67,12 +70,12 @@ def do_lock( with project.core.ui.open_spinner("Re-calculating hashes..."): for key, candidate in locked_repo.packages.items(): reqs, python_requires, summary = locked_repo.candidate_info[key] - candidate.hashes = repo.get_hashes(candidate) candidate.summary = summary candidate.requires_python = python_requires ident = cast(str, key[0]) mapping[ident] = candidate dependencies[ident] = list(map(parse_requirement, reqs)) + fetch_hashes(repo, mapping) lockfile = format_lockfile(project, mapping, dependencies) project.write_lockfile(lockfile) return mapping @@ -87,35 +90,36 @@ def do_lock( with ui.logging("lock"): # The context managers are nested to ensure the spinner is stopped before # any message is thrown to the output. - with ui.open_spinner(title="Resolving dependencies", spinner="dots") as spin: - reporter = project.get_reporter(requirements, tracked_names, spin) - resolver: Resolver = project.core.resolver_class(provider, reporter) - signals.pre_lock.send(project, requirements=requirements, dry_run=dry_run) - try: + try: + with ui.open_spinner(title="Resolving dependencies") as spin: + reporter = project.get_reporter(requirements, tracked_names, spin) + resolver: Resolver = project.core.resolver_class(provider, reporter) + hooks.try_emit("pre_lock", requirements=requirements, dry_run=dry_run) mapping, dependencies = resolve( resolver, requirements, project.environment.python_requires, resolve_max_rounds, ) - except ResolutionTooDeep: - spin.fail(f"{termui.Emoji.LOCK} Lock failed") - ui.echo( - "The dependency resolution exceeds the maximum loop depth of " - f"{resolve_max_rounds}, there may be some circular dependencies " - "in your project. 
Try to solve them or increase the " - f"{termui.green('`strategy.resolve_max_rounds`')} config.", - err=True, - ) - raise - except ResolutionImpossible as err: - spin.fail(f"{termui.Emoji.LOCK} Lock failed") - ui.echo(format_resolution_impossible(err), err=True) - raise ResolutionImpossible("Unable to find a resolution") from None - else: - data = format_lockfile(project, mapping, dependencies) - spin.succeed(f"{termui.Emoji.LOCK} Lock successful") - signals.post_lock.send(project, resolution=mapping, dry_run=dry_run) + fetch_hashes(provider.repository, mapping) + except ResolutionTooDeep: + ui.echo(f"{termui.Emoji.LOCK} Lock failed", err=True) + ui.echo( + "The dependency resolution exceeds the maximum loop depth of " + f"{resolve_max_rounds}, there may be some circular dependencies " + "in your project. Try to solve them or increase the " + f"[green]`strategy.resolve_max_rounds`[/] config.", + err=True, + ) + raise + except ResolutionImpossible as err: + ui.echo(f"{termui.Emoji.LOCK} Lock failed", err=True) + ui.echo(format_resolution_impossible(err), err=True) + raise ResolutionImpossible("Unable to find a resolution") from None + else: + data = format_lockfile(project, mapping, dependencies) + ui.echo(f"{termui.Emoji.LOCK} Lock successful") + hooks.try_emit("post_lock", resolution=mapping, dry_run=dry_run) project.write_lockfile(data, write=not dry_run) @@ -143,6 +147,7 @@ def resolve_candidates_from_lockfile( project.environment.python_requires, resolve_max_rounds, ) + fetch_hashes(provider.repository, mapping) return mapping @@ -151,19 +156,19 @@ def check_lockfile(project: Project, raise_not_exist: bool = True) -> str | None if not project.lockfile_file.exists(): if raise_not_exist: raise ProjectError("Lock file does not exist, nothing to install") - project.core.ui.echo("Lock file does not exist", fg="yellow", err=True) + project.core.ui.echo("Lock file does not exist", style="yellow", err=True) return "all" elif not project.is_lockfile_compatible(): project.core.ui.echo( "Lock file version is not compatible with PDM, installation may fail", - fg="yellow", + style="yellow", err=True, ) return "all" elif not project.is_lockfile_hash_match(): project.core.ui.echo( "Lock file hash doesn't match pyproject.toml, packages may be outdated", - fg="yellow", + style="yellow", err=True, ) return "reuse" @@ -183,8 +188,10 @@ def do_sync( no_editable: bool | Collection[str] = False, no_self: bool = False, reinstall: bool = False, + hooks: HookManager | None = None, ) -> None: """Synchronize project""" + hooks = hooks or HookManager(project) if requirements is None: groups = translate_groups(project, default, dev, groups or ()) requirements = [] @@ -205,9 +212,9 @@ def do_sync( use_install_cache=project.config["install.cache"], reinstall=reinstall, ) - signals.pre_install.send(project, candidates=candidates, dry_run=dry_run) + hooks.try_emit("pre_install", candidates=candidates, dry_run=dry_run) handler.synchronize() - signals.post_install.send(project, candidates=candidates, dry_run=dry_run) + hooks.try_emit("post_install", candidates=candidates, dry_run=dry_run) def do_add( @@ -224,8 +231,10 @@ def do_add( no_self: bool = False, dry_run: bool = False, prerelease: bool = False, + hooks: HookManager | None = None, ) -> None: """Add packages and install""" + hooks = hooks or HookManager(project) check_project_file(project) if not editables and not packages: raise PdmUsageError("Must specify at least one package or editable package.") @@ -235,6 +244,15 @@ def do_add( group = "dev" if dev else 
"default" tracked_names: set[str] = set() requirements: dict[str, Requirement] = {} + if ( + group == "default" + or not dev + and group not in project.tool_settings.get("dev-dependencies", {}) + ): + if editables: + raise PdmUsageError( + "Cannot add editables to the default or optional dependency group" + ) for r in [parse_requirement(line, True) for line in editables] + [ parse_requirement(line) for line in packages ]: @@ -244,7 +262,7 @@ def do_add( requirements[key] = r project.core.ui.echo( f"Adding packages to {group} {'dev-' if dev else ''}dependencies: " - + ", ".join(termui.green(r.as_line(), bold=True) for r in requirements.values()) + + ", ".join(f"[bold green]{r.as_line()}[/]" for r in requirements.values()) ) all_dependencies = project.all_dependencies group_deps = all_dependencies.setdefault(group, {}) @@ -253,15 +271,15 @@ def do_add( req.specifier = get_specifier("") group_deps.update(requirements) reqs = [r for deps in all_dependencies.values() for r in deps.values()] - resolved = do_lock(project, strategy, tracked_names, reqs, dry_run=dry_run) + resolved = do_lock( + project, strategy, tracked_names, reqs, dry_run=dry_run, hooks=hooks + ) # Update dependency specifiers and lockfile hash. deps_to_update = group_deps if unconstrained else requirements save_version_specifiers({group: deps_to_update}, resolved, save) if not dry_run: - project.add_dependencies( - deps_to_update, group, dev, replace_editable=no_editable - ) + project.add_dependencies(deps_to_update, group, dev) project.write_lockfile(project.lockfile, False) if sync: @@ -273,6 +291,7 @@ def do_add( no_self=no_self, requirements=list(group_deps.values()), dry_run=dry_run, + hooks=hooks, ) @@ -292,8 +311,10 @@ def do_update( no_editable: bool = False, no_self: bool = False, prerelease: bool = False, + hooks: HookManager | None = None, ) -> None: """Update specified packages or all packages""" + hooks = hooks or HookManager(project) check_project_file(project) if len(packages) > 0 and (top or len(groups) > 1 or not default): raise PdmUsageError( @@ -323,16 +344,15 @@ def do_update( ) if not matched_name: raise ProjectError( - "{} does not exist in {} {}dependencies.".format( - termui.green(name, bold=True), group, "dev-" if dev else "" - ) + f"[bold green]{name}[/] does not exist in {group} " + f"{'dev-' if dev else ''}dependencies." 
) dependencies[matched_name].prerelease = prerelease updated_deps[group][matched_name] = dependencies[matched_name] project.core.ui.echo( "Updating packages: {}.".format( ", ".join( - termui.green(v, bold=True) + f"[bold green]{v}[/]" for v in chain.from_iterable(updated_deps.values()) ) ) @@ -348,6 +368,7 @@ def do_update( chain.from_iterable(updated_deps.values()), reqs, dry_run=dry_run, + hooks=hooks, ) if sync or dry_run: do_sync( @@ -363,6 +384,7 @@ def do_update( else None, no_editable=no_editable, no_self=no_self, + hooks=hooks, ) if unconstrained and not dry_run: # Need to update version constraints @@ -382,8 +404,10 @@ def do_remove( no_editable: bool = False, no_self: bool = False, dry_run: bool = False, + hooks: HookManager | None = None, ) -> None: """Remove packages from working set and pyproject.toml""" + hooks = hooks or HookManager(project) check_project_file(project) if not packages: raise PdmUsageError("Must specify at least one package to remove.") @@ -395,25 +419,23 @@ def do_remove( deps = project.get_pyproject_dependencies(group, dev) project.core.ui.echo( f"Removing packages from {group} {'dev-' if dev else ''}dependencies: " - + ", ".join(str(termui.green(name, bold=True)) for name in packages) + + ", ".join(f"[bold green]{name}[/]" for name in packages) ) for name in packages: req = parse_requirement(name) matched_indexes = sorted( - (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True + (i for i, r in enumerate(deps) if req.matches(r)), reverse=True ) if not matched_indexes: raise ProjectError( - "{} does not exist in {} dependencies.".format( - termui.green(name, bold=True), group - ) + f"[bold green]{name}[/] does not exist in {group} dependencies." ) for i in matched_indexes: del deps[i] if not dry_run: project.write_pyproject() - do_lock(project, "reuse", dry_run=dry_run) + do_lock(project, "reuse", dry_run=dry_run, hooks=hooks) if sync: do_sync( project, @@ -423,6 +445,7 @@ def do_remove( no_editable=no_editable, no_self=no_self, dry_run=dry_run, + hooks=hooks, ) @@ -434,7 +457,7 @@ def do_list( json: bool = False, ) -> None: """Display a list of packages installed in the local packages directory.""" - from pdm.cli.utils import build_dependency_graph, format_dependency_graph + from pdm.cli.utils import build_dependency_graph, show_dependency_graph check_project_file(project) working_set = project.environment.get_working_set() @@ -442,9 +465,7 @@ def do_list( dep_graph = build_dependency_graph( working_set, project.environment.marker_environment ) - project.core.ui.echo( - format_dependency_graph(project, dep_graph, reverse=reverse, json=json) - ) + show_dependency_graph(project, dep_graph, reverse=reverse, json=json) else: if reverse: raise PdmUsageError("--reverse must be used with --graph") @@ -468,7 +489,7 @@ def do_list( project.core.ui.echo("\n".join(reqs)) return rows = [ - (termui.green(k, bold=True), termui.yellow(v.version), get_dist_location(v)) + (f"[b green]{k}[/]", f"[yellow]{v.version}[/]", get_dist_location(v)) for k, v in sorted(working_set.items()) ] project.core.ui.display_columns(rows, ["Package", "Version", "Location"]) @@ -481,10 +502,13 @@ def do_build( dest: str = "dist", clean: bool = True, config_settings: Mapping[str, str] | None = None, + hooks: HookManager | None = None, ) -> None: """Build artifacts for distribution.""" from pdm.builders import SdistBuilder, WheelBuilder + hooks = hooks or HookManager(project) + if project.is_global: raise ProjectError("Not allowed to build based on the global project.") if not 
wheel and not sdist: @@ -494,7 +518,7 @@ def do_build( dest = project.root.joinpath(dest).as_posix() if clean: shutil.rmtree(dest, ignore_errors=True) - signals.pre_build.send(project, dest=dest, config_settings=config_settings) + hooks.try_emit("pre_build", dest=dest, config_settings=config_settings) artifacts: list[str] = [] with project.core.ui.logging("build"): if sdist: @@ -511,9 +535,7 @@ def do_build( ) project.core.ui.echo(f"Built wheel at {loc}") artifacts.append(loc) - signals.post_build.send( - project, artifacts=artifacts, config_settings=config_settings - ) + hooks.try_emit("post_build", artifacts=artifacts, config_settings=config_settings) def do_init( @@ -525,8 +547,10 @@ def do_init( author: str = "", email: str = "", python_requires: str = "", + hooks: HookManager | None = None, ) -> None: """Bootstrap the project and create a pyproject.toml""" + hooks = hooks or HookManager(project) data = { "project": { "name": name, @@ -537,7 +561,7 @@ def do_init( "dependencies": make_array([], True), }, "build-system": { - "requires": ["pdm-pep517>=0.12.0"], + "requires": ["pdm-pep517>=1.0.0"], "build-backend": "pdm.pep517.api", }, } @@ -556,7 +580,7 @@ def do_init( project._pyproject["project"] = data["project"] # type: ignore project._pyproject["build-system"] = data["build-system"] # type: ignore project.write_pyproject() - signals.post_init.send(project) + hooks.try_emit("post_init") def do_use( @@ -564,10 +588,13 @@ def do_use( python: str = "", first: bool = False, ignore_remembered: bool = False, + hooks: HookManager | None = None, ) -> None: """Use the specified python version and save in project config. The python can be a version string or interpreter path. """ + hooks = hooks or HookManager(project) + if python: python = python.strip() @@ -587,13 +614,13 @@ def version_matcher(py_version: PythonInfo) -> bool: if not cached_python.valid: project.core.ui.echo( f"The last selection is corrupted. {path!r}", - fg="red", + style="red", err=True, ) elif version_matcher(cached_python): project.core.ui.echo( "Using the last selection, add '-i' to ignore it.", - fg="yellow", + style="yellow", err=True, ) selected_python = cached_python @@ -609,7 +636,7 @@ def version_matcher(py_version: PythonInfo) -> bool: project.core.ui.echo(f" - {py.executable} ({py.identifier})", err=True) raise NoPythonVersion( "No python is found meeting the requirement " - f"{termui.green('python' + str(project.python_requires))}" + f"[green]python {str(project.python_requires)}[/]" ) if first or len(matching_interperters) == 1: selected_python = matching_interperters[0] @@ -617,13 +644,14 @@ def version_matcher(py_version: PythonInfo) -> bool: project.core.ui.echo("Please enter the Python interpreter to use") for i, py_version in enumerate(matching_interperters): project.core.ui.echo( - f"{i}. {termui.green(str(py_version.executable))} " + f"{i}. 
[green]{str(py_version.executable)}[/] " f"({py_version.identifier})" ) - selection = click.prompt( - "Please select:", - type=click.Choice([str(i) for i in range(len(matching_interperters))]), + selection = termui.ask( + "Please select", default="0", + prompt_type=int, + choices=[str(i) for i in range(len(matching_interperters))], show_choices=False, ) selected_python = matching_interperters[int(selection)] @@ -636,10 +664,9 @@ def version_matcher(py_version: PythonInfo) -> bool: old_python = project.python if "python.path" in project.config else None project.core.ui.echo( - "Using Python interpreter: {} ({})".format( - termui.green(str(selected_python.executable)), - selected_python.identifier, - ) + "Using Python interpreter: " + f"[green]{str(selected_python.executable)}[/] " + f"({selected_python.identifier})" ) project.python = selected_python if ( @@ -647,8 +674,9 @@ def version_matcher(py_version: PythonInfo) -> bool: and old_python.path != selected_python.path and not project.environment.is_global ): - project.core.ui.echo(termui.cyan("Updating executable scripts...")) + project.core.ui.echo("Updating executable scripts...", style="cyan") project.environment.update_shebangs(selected_python.executable.as_posix()) + hooks.try_emit("post_use", python=selected_python) def do_import( @@ -682,7 +710,13 @@ def do_import( if "tool" not in pyproject or "pdm" not in pyproject["tool"]: # type: ignore pyproject.setdefault("tool", {})["pdm"] = tomlkit.table() - + if "build" in pyproject["tool"]["pdm"] and isinstance( + pyproject["tool"]["pdm"]["build"], str + ): + pyproject["tool"]["pdm"]["build"] = { + "setup-script": pyproject["tool"]["pdm"]["build"], + "run-setuptools": True, + } if "project" not in pyproject: pyproject.add("project", tomlkit.table()) # type: ignore pyproject["project"].add( # type: ignore @@ -695,7 +729,7 @@ def do_import( merge_dictionary(pyproject["project"], project_data) # type: ignore merge_dictionary(pyproject["tool"]["pdm"], settings) # type: ignore pyproject["build-system"] = { - "requires": ["pdm-pep517"], + "requires": ["pdm-pep517>=1.0.0"], "build-backend": "pdm.pep517.api", } project.pyproject = cast(dict, pyproject) @@ -708,20 +742,18 @@ def ask_for_import(project: Project) -> None: if not importable_files: return project.core.ui.echo( - termui.cyan("Found following files from other formats that you may import:") + "Found following files from other formats that you may import:", style="cyan" ) for i, (key, filepath) in enumerate(importable_files): - project.core.ui.echo(f"{i}. {termui.green(filepath.as_posix())} ({key})") + project.core.ui.echo(f"{i}. [green]{filepath.as_posix()}[/] ({key})") project.core.ui.echo( - "{}. {}".format( - len(importable_files), - termui.yellow("don't do anything, I will import later."), - ) + f"{len(importable_files)}. [yellow]don't do anything, I will import later.[/]" ) - choice = click.prompt( - "Please select:", - type=click.Choice([str(i) for i in range(len(importable_files) + 1)]), - show_default=False, + choice = termui.ask( + "Please select", + prompt_type=int, + choices=[str(i) for i in range(len(importable_files) + 1)], + show_choices=False, ) if int(choice) == len(importable_files): return @@ -738,16 +770,14 @@ def print_pep582_command(ui: termui.UI, shell: str = "AUTO") -> None: set_env_in_reg("PYTHONPATH", PEP582_PATH) except PermissionError: ui.echo( - termui.red( - "Permission denied, please run the terminal as administrator." 
- ), + "Permission denied, please run the terminal as administrator.", + style="red", err=True, ) ui.echo( - termui.green( - "The environment variable has been saved, " - "please restart the session to take effect." - ) + "The environment variable has been saved, " + "please restart the session to take effect.", + style="green", ) return lib_path = PEP582_PATH.replace("'", "\\'") @@ -798,8 +828,6 @@ def print_pep582_command(ui: termui.UI, shell: str = "AUTO") -> None: def get_latest_version(project: Project) -> str | None: """Get the latest version of PDM from PyPI, cache for 7 days""" - from pdm.utils import get_finder - cache_key = hashlib.sha224(sys.executable.encode()).hexdigest() cache_file = project.cache("self-check") / cache_key if cache_file.exists(): @@ -812,10 +840,13 @@ def get_latest_version(project: Project) -> str | None: and current_time - state["last-check"] < 60 * 60 * 24 * 7 ): return cast(str, state["latest-version"]) - candidate = get_finder([], project.cache_dir.as_posix()).find_best_candidate("pdm") - if not candidate.best_candidate: + with project.environment.get_finder( + [project.default_source], ignore_compatibility=True + ) as finder: + candidate = finder.find_best_match("pdm").best + if not candidate: return None - latest_version = str(candidate.best_candidate.version) + latest_version = str(candidate.version) state.update({"latest-version": latest_version, "last-check": current_time}) cache_file.write_text(json.dumps(state)) return latest_version @@ -826,7 +857,7 @@ def check_update(project: Project) -> None: import sys from shlex import quote - from pip._vendor.packaging.version import parse as parse_version + from packaging.version import parse as parse_version from pdm.cli.utils import ( is_homebrew_installation, @@ -841,23 +872,23 @@ def check_update(project: Project) -> None: ): return if is_pipx_installation(): # pragma: no cover - install_command = "$ pipx upgrade pdm" + install_command = "pipx upgrade pdm" elif is_scoop_installation(): # pragma: no cover - install_command = "$ scoop update pdm" + install_command = "scoop update pdm" elif is_homebrew_installation(): # pragma: no cover - install_command = "$ brew upgrade pdm" + install_command = "brew upgrade pdm" else: - install_command = f"$ {quote(sys.executable)} -m pip install -U pdm" + install_command = f"{quote(sys.executable)} -m pip install -U pdm" - disable_command = "$ pdm config check_update false" + disable_command = "pdm config check_update false" message = [ - termui.blue(f"\nPDM {termui.cyan(this_version)}"), - termui.blue(f" is installed, while {termui.cyan(latest_version)}"), - termui.blue(" is available.\n"), - termui.blue(f"Please run {termui.green(install_command, bold=True)}"), - termui.blue(" to upgrade.\n"), - termui.blue(f"Run {termui.green(disable_command, bold=True)}"), - termui.blue(" to disable the check."), + f"\nPDM [cyan]{this_version}[/]", + f" is installed, while [cyan]{latest_version}[/]", + " is available.\n", + f"Please run [bold green]`{install_command}`[/]", + " to upgrade.\n", + f"Run [bold green]`{disable_command}`[/]", + " to disable the check.", ] - project.core.ui.echo("".join(message), err=True) + project.core.ui.echo("".join(message), err=True, style="blue") diff --git a/pdm/cli/commands/add.py b/pdm/cli/commands/add.py index ce8a8c0c54..3ae1bd92a6 100644 --- a/pdm/cli/commands/add.py +++ b/pdm/cli/commands/add.py @@ -2,6 +2,7 @@ from pdm.cli import actions from pdm.cli.commands.base import BaseCommand +from pdm.cli.hooks import HookManager from 
pdm.cli.options import ( dry_run_option, install_group, @@ -9,6 +10,7 @@ packages_group, prerelease_option, save_strategy_group, + skip_option, unconstrained_option, update_strategy_group, ) @@ -28,6 +30,7 @@ class Command(BaseCommand): packages_group, install_group, dry_run_option, + skip_option, ] def add_arguments(self, parser: argparse.ArgumentParser) -> None: @@ -66,4 +69,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: no_self=options.no_self, dry_run=options.dry_run, prerelease=options.prerelease, + hooks=HookManager(project, options.skip), ) diff --git a/pdm/cli/commands/build.py b/pdm/cli/commands/build.py index 37e8e2d910..bf04735e30 100644 --- a/pdm/cli/commands/build.py +++ b/pdm/cli/commands/build.py @@ -1,17 +1,21 @@ import argparse -from pdm import signals from pdm.cli import actions from pdm.cli.commands.base import BaseCommand -from pdm.cli.commands.run import run_script_if_present -from pdm.cli.options import no_isolation_option, project_option, verbose_option +from pdm.cli.hooks import HookManager +from pdm.cli.options import ( + no_isolation_option, + project_option, + skip_option, + verbose_option, +) from pdm.project import Project class Command(BaseCommand): """Build artifacts for distribution""" - arguments = [verbose_option, project_option, no_isolation_option] + arguments = [verbose_option, project_option, no_isolation_option, skip_option] def add_arguments(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( @@ -66,8 +70,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: dest=options.dest, clean=options.clean, config_settings=config_settings, + hooks=HookManager(project, options.skip), ) - - -signals.pre_build.connect(run_script_if_present("pre_build"), weak=False) -signals.post_build.connect(run_script_if_present("post_build"), weak=False) diff --git a/pdm/cli/commands/cache.py b/pdm/cli/commands/cache.py index c3a0d87803..b7fc2fbeb3 100644 --- a/pdm/cli/commands/cache.py +++ b/pdm/cli/commands/cache.py @@ -7,7 +7,6 @@ from pdm.cli.commands.base import BaseCommand from pdm.cli.options import verbose_option from pdm.exceptions import PdmUsageError -from pdm.models.pip_shims import directory_size, file_size, find_files from pdm.project import Project @@ -29,6 +28,22 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: self.parser.print_help() +def file_size(file: Path) -> int: + if file.is_symlink(): + return 0 + return os.path.getsize(file) + + +def find_files(parent: Path, pattern: str) -> Iterable[Path]: + for file in parent.rglob(pattern): + if file.is_file() or file.is_symlink(): + yield file + + +def directory_size(directory: Path) -> int: + return sum(map(file_size, find_files(directory, "*"))) + + def format_size(size: float) -> str: if size > 1000 * 1000: return "{:.1f} MB".format(size / 1000.0 / 1000) @@ -44,18 +59,15 @@ def remove_cache_files(project: Project, pattern: str) -> None: if not pattern: raise PdmUsageError("Please provide a pattern") - if pattern == "*": - files = list(find_files(project.cache_dir.as_posix(), pattern)) - else: - # Only remove wheel files which specific pattern is given - files = list(find_files(project.cache("wheels").as_posix(), pattern)) + wheel_cache = project.cache("wheels") + files = list(find_files(wheel_cache, pattern)) if not files: raise PdmUsageError("No matching files found") for file in files: os.unlink(file) - project.core.ui.echo(f"Removed {file}", verbosity=termui.DETAIL) + project.core.ui.echo(f"Removed {file}", 
verbosity=termui.Verbosity.DETAIL) project.core.ui.echo(f"{len(files)} file{'s' if len(files) > 1 else ''} removed") @@ -89,15 +101,16 @@ def _clear_packages(root: Path) -> int: @staticmethod def _clear_files(root: Path) -> int: - files = find_files(root.as_posix(), "*") + files = list(find_files(root, "*")) for file in files: os.unlink(file) return len(files) def handle(self, project: Project, options: argparse.Namespace) -> None: + types: Iterable[str] = () if not options.type: - types: Iterable[str] = self.CACHE_TYPES + pass elif options.type not in self.CACHE_TYPES: raise PdmUsageError( f"Invalid cache type {options.type}, should one of {self.CACHE_TYPES}" @@ -108,12 +121,15 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: packages = files = 0 with project.core.ui.open_spinner( f"Clearing {options.type or 'all'} caches..." - ) as spinner: - for type_ in types: - if type_ == "packages": - packages += self._clear_packages(project.cache(type_)) - else: - files += self._clear_files(project.cache(type_)) + ): + if not options.type: + packages, files = 0, self._clear_files(project.cache_dir) + else: + for type_ in types: + if type_ == "packages": + packages += self._clear_packages(project.cache(type_)) + else: + files += self._clear_files(project.cache(type_)) message = [] if packages: message.append(f"{packages} package{'s' if packages > 1 else ''}") @@ -123,7 +139,7 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: text = "No files need to be removed" else: text = f"{' and '.join(message)} are removed" - spinner.succeed(text) + project.core.ui.echo(text) class RemoveCommand(BaseCommand): @@ -150,8 +166,8 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: def handle(self, project: Project, options: argparse.Namespace) -> None: rows = [ - (format_size(file_size(file)), os.path.basename(file)) - for file in find_files(project.cache("wheels").as_posix(), options.pattern) + (format_size(file_size(file)), file.name) + for file in find_files(project.cache("wheels"), options.pattern) ] project.core.ui.display_columns(rows, [">Size", "Filename"]) @@ -164,20 +180,20 @@ class InfoCommand(BaseCommand): def handle(self, project: Project, options: argparse.Namespace) -> None: with project.core.ui.open_spinner("Calculating cache files"): output = [ - f"{termui.cyan('Cache Root')}: {project.cache_dir}, " - f"Total size: {format_size(directory_size(str(project.cache_dir)))}" + f"[cyan]Cache Root[/]: {project.cache_dir}, " + f"Total size: {format_size(directory_size(project.cache_dir))}" ] for name, description in [ - ("hashes", "File Hashe Cache"), + ("hashes", "File Hash Cache"), ("http", "HTTP Cache"), ("wheels", "Wheels Cache"), ("metadata", "Metadata Cache"), ("packages", "Package Cache"), ]: cache_location = project.cache(name) - files = list(find_files(cache_location.as_posix(), "*")) - size = directory_size(cache_location.as_posix()) - output.append(f" {termui.cyan(description)}: {cache_location}") + files = list(find_files(cache_location, "*")) + size = directory_size(cache_location) + output.append(f" [cyan]{description}[/]: {cache_location}") output.append(f" Files: {len(files)}, Size: {format_size(size)}") project.core.ui.echo("\n".join(output)) diff --git a/pdm/cli/commands/config.py b/pdm/cli/commands/config.py index 30ff4da972..24ff24ee1b 100644 --- a/pdm/cli/commands/config.py +++ b/pdm/cli/commands/config.py @@ -37,11 +37,15 @@ def _get_config(self, project: Project, options: argparse.Namespace) -> None: 
project.core.ui.echo( "DEPRECATED: the config has been renamed to " f"{project.project_config.deprecated[options.key]}", - fg="yellow", + style="yellow", err=True, ) options.key = project.project_config.deprecated[options.key] - project.core.ui.echo(project.config[options.key]) + if options.key.split(".")[0] == "repository": + value = project.global_config[options.key] + else: + value = project.config[options.key] + project.core.ui.echo(value) def _set_config(self, project: Project, options: argparse.Namespace) -> None: config = project.project_config if options.local else project.global_config @@ -49,7 +53,7 @@ def _set_config(self, project: Project, options: argparse.Namespace) -> None: project.core.ui.echo( "DEPRECATED: the config has been renamed to " f"{config.deprecated[options.key]}", - fg="yellow", + style="yellow", err=True, ) config[options.key] = options.value @@ -59,25 +63,32 @@ def _show_config(self, config: Config, ui: termui.UI) -> None: config_item = config._config_map[key] deprecated = "" if config_item.replace and config_item.replace in config._data: - deprecated = termui.red(f"(deprecating: {config_item.replace})") + deprecated = f"[red](deprecating: {config_item.replace})[/]" ui.echo( - termui.yellow("# " + config_item.description), - verbosity=termui.DETAIL, + f"# {config_item.description}", + style="yellow", + verbosity=termui.Verbosity.DETAIL, ) - ui.echo(f"{termui.cyan(key)}{deprecated} = {config[key]}") + ui.echo(f"[cyan]{key}[/]{deprecated} = {config[key]}") def _list_config(self, project: Project, options: argparse.Namespace) -> None: ui = project.core.ui - ui.echo("Home configuration ({}):".format(project.global_config.config_file)) - with ui.indent(" "): - self._show_config(project.global_config, ui) + ui.echo( + "Home configuration ([green]{}[/]):".format( + project.global_config.config_file + ), + style="bold", + ) + self._show_config(project.global_config, ui) ui.echo() ui.echo( - "Project configuration ({}):".format(project.project_config.config_file) + "Project configuration ([green]{}[/]):".format( + project.project_config.config_file + ), + style="bold", ) - with ui.indent(" "): - self._show_config(project.project_config, ui) + self._show_config(project.project_config, ui) def _delete_config(self, project: Project, options: argparse.Namespace) -> None: config = project.project_config if options.local else project.global_config @@ -85,7 +96,7 @@ def _delete_config(self, project: Project, options: argparse.Namespace) -> None: project.core.ui.echo( "DEPRECATED: the config has been renamed to " f"{config.deprecated[options.key]}", - fg="yellow", + style="yellow", err=True, ) del config[options.key] diff --git a/pdm/cli/commands/export.py b/pdm/cli/commands/export.py index 847b8cb471..eb99bc2fd7 100644 --- a/pdm/cli/commands/export.py +++ b/pdm/cli/commands/export.py @@ -65,7 +65,7 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: project.core.ui.echo( "The exported requirements file is no longer cross-platform. 
" "Using it on other platforms may cause unexpected result.", - fg="yellow", + style="yellow", err=True, ) candidates = resolve_candidates_from_lockfile( diff --git a/pdm/cli/commands/info.py b/pdm/cli/commands/info.py index 37928e5b05..c8e053b1ff 100644 --- a/pdm/cli/commands/info.py +++ b/pdm/cli/commands/info.py @@ -1,7 +1,6 @@ import argparse import json -from pdm import termui from pdm.cli.commands.base import BaseCommand from pdm.cli.options import ArgumentGroup from pdm.cli.utils import check_project_file @@ -45,16 +44,21 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: ) else: - rows = [ - (termui.cyan("PDM version:", bold=True), project.core.version), - ( - termui.cyan("Python Interpreter:", bold=True), + for name, value in zip( + [ + f"[bold cyan]{key}[/]:" + for key in [ + "PDM version", + "Python Interpreter", + "Project Root", + "Project Packages", + ] + ], + [ + project.core.version, f"{interpreter.executable} ({interpreter.identifier})", - ), - (termui.cyan("Project Root:", bold=True), project.root.as_posix()), - ( - termui.cyan("Project Packages:", bold=True), + project.root.as_posix(), str(project.environment.packages_path), - ), - ] - project.core.ui.display_columns(rows) + ], + ): + project.core.ui.echo(f"{name}\n {value}") diff --git a/pdm/cli/commands/init.py b/pdm/cli/commands/init.py index 3f858bcd76..5293b45428 100644 --- a/pdm/cli/commands/init.py +++ b/pdm/cli/commands/init.py @@ -1,11 +1,10 @@ import argparse -import click - -from pdm import signals, termui +from pdm import termui from pdm.cli import actions from pdm.cli.commands.base import BaseCommand -from pdm.cli.commands.run import run_script_if_present +from pdm.cli.hooks import HookManager +from pdm.cli.options import skip_option from pdm.project import Project from pdm.utils import get_user_email_from_git @@ -23,9 +22,10 @@ def set_interactive(self, value: bool) -> None: def ask(self, question: str, default: str) -> str: if not self.interactive: return default - return click.prompt(question, default=default) + return termui.ask(question, default=default) def add_arguments(self, parser: argparse.ArgumentParser) -> None: + skip_option.add_to_parser(parser) parser.add_argument( "-n", "--non-interactive", @@ -35,24 +35,23 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: parser.set_defaults(search_parent=False) def handle(self, project: Project, options: argparse.Namespace) -> None: + hooks = HookManager(project, options.skip) if project.pyproject_file.exists(): project.core.ui.echo( - "{}".format( - termui.cyan("pyproject.toml already exists, update it now.") - ) + "pyproject.toml already exists, update it now.", style="cyan" ) else: - project.core.ui.echo( - "{}".format(termui.cyan("Creating a pyproject.toml for PDM...")) - ) + project.core.ui.echo("Creating a pyproject.toml for PDM...", style="cyan") self.set_interactive(not options.non_interactive) if self.interactive: - actions.do_use(project) + actions.do_use(project, hooks=hooks) else: - actions.do_use(project, "3", True) + actions.do_use(project, "3", True, hooks=hooks) is_library = ( - click.confirm("Is the project a library that will be uploaded to PyPI?") + termui.confirm( + "Is the project a library that will be uploaded to PyPI", default=False + ) if self.interactive else False ) @@ -81,9 +80,7 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: author=author, email=email, python_requires=python_requires, + hooks=hooks, ) if self.interactive: actions.ask_for_import(project) - - 
-signals.post_init.connect(run_script_if_present("post_init"), weak=False) diff --git a/pdm/cli/commands/install.py b/pdm/cli/commands/install.py index 586c257b70..ca0090f829 100644 --- a/pdm/cli/commands/install.py +++ b/pdm/cli/commands/install.py @@ -1,13 +1,17 @@ import argparse import sys -import click - -from pdm import signals +from pdm import termui from pdm.cli import actions from pdm.cli.commands.base import BaseCommand -from pdm.cli.commands.run import run_script_if_present -from pdm.cli.options import dry_run_option, groups_group, install_group, lockfile_option +from pdm.cli.hooks import HookManager +from pdm.cli.options import ( + dry_run_option, + groups_group, + install_group, + lockfile_option, + skip_option, +) from pdm.project import Project @@ -19,6 +23,7 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: install_group.add_to_parser(parser) dry_run_option.add_to_parser(parser) lockfile_option.add_to_parser(parser) + skip_option.add_to_parser(parser) parser.add_argument( "--no-lock", dest="lock", @@ -33,19 +38,25 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: ) def handle(self, project: Project, options: argparse.Namespace) -> None: - if not project.meta and click._compat.isatty(sys.stdout): + if not project.meta and termui.is_interactive(): actions.ask_for_import(project) + hooks = HookManager(project, options.skip) + strategy = actions.check_lockfile(project, False) if strategy: if options.check: project.core.ui.echo( - "Please run `pdm lock` to update the lock file", err=True + "Please run [green]`pdm lock`[/] to update the lock file", err=True ) sys.exit(1) if options.lock: - project.core.ui.echo("Updating the lock file...", fg="green", err=True) - actions.do_lock(project, strategy=strategy, dry_run=options.dry_run) + project.core.ui.echo( + "Updating the lock file...", style="green", err=True + ) + actions.do_lock( + project, strategy=strategy, dry_run=options.dry_run, hooks=hooks + ) actions.do_sync( project, @@ -55,8 +66,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: no_editable=options.no_editable, no_self=options.no_self, dry_run=options.dry_run, + hooks=hooks, ) - - -signals.pre_install.connect(run_script_if_present("pre_install"), weak=False) -signals.post_install.connect(run_script_if_present("post_install"), weak=False) diff --git a/pdm/cli/commands/lock.py b/pdm/cli/commands/lock.py index e9c4a8d9a8..e680bf36a2 100644 --- a/pdm/cli/commands/lock.py +++ b/pdm/cli/commands/lock.py @@ -1,17 +1,20 @@ import argparse -from pdm import signals from pdm.cli import actions from pdm.cli.commands.base import BaseCommand -from pdm.cli.commands.run import run_script_if_present -from pdm.cli.options import lockfile_option, no_isolation_option +from pdm.cli.hooks import HookManager +from pdm.cli.options import lockfile_option, no_isolation_option, skip_option from pdm.project import Project class Command(BaseCommand): """Resolve and lock dependencies""" - arguments = BaseCommand.arguments + [lockfile_option, no_isolation_option] + arguments = BaseCommand.arguments + [ + lockfile_option, + no_isolation_option, + skip_option, + ] def add_arguments(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( @@ -21,8 +24,8 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: ) def handle(self, project: Project, options: argparse.Namespace) -> None: - actions.do_lock(project, refresh=options.refresh) - - -signals.pre_lock.connect(run_script_if_present("pre_lock"), weak=False) 
-signals.post_lock.connect(run_script_if_present("post_lock"), weak=False) + actions.do_lock( + project, + refresh=options.refresh, + hooks=HookManager(project, options.skip), + ) diff --git a/pdm/cli/commands/plugin.py b/pdm/cli/commands/plugin.py index f5c110c5b5..ef785cac2f 100644 --- a/pdm/cli/commands/plugin.py +++ b/pdm/cli/commands/plugin.py @@ -6,23 +6,17 @@ import subprocess import sys -import click +from pip import __file__ as pip_location from pdm import termui from pdm.cli.commands.base import BaseCommand from pdm.cli.options import verbose_option from pdm.cli.utils import Package, build_dependency_graph +from pdm.compat import importlib_metadata from pdm.models.environment import WorkingSet from pdm.project import Project from pdm.utils import normalize_name -if sys.version_info >= (3, 8): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - -from pip import __file__ as pip_location - def _all_plugins() -> list[str]: result: set[str] = set() @@ -72,7 +66,7 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: for plugin in plugins: metadata = importlib_metadata.metadata(plugin) echo( - f"{termui.green(metadata['Name'])} {termui.yellow(metadata['Version'])}" + f"[green]{metadata['Name']}[/] [yellow]{metadata['Version']}[/]", ) if metadata["Summary"]: echo(f" {metadata['Summary']}") @@ -101,18 +95,20 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: pip_args = ["install"] + shlex.split(options.pip_args) + options.packages project.core.ui.echo( - f"Running pip command: {pip_args}", verbosity=termui.DETAIL + f"Running pip command: {pip_args}", verbosity=termui.Verbosity.DETAIL ) - with project.core.ui.open_spinner( - f"Installing plugins: {options.packages}" - ) as spinner: - try: + try: + with project.core.ui.open_spinner( + f"Installing plugins: {options.packages}" + ): run_pip(pip_args) - except subprocess.CalledProcessError as e: - spinner.fail("Installation failed: \n" + e.output.decode("utf8")) - sys.exit(1) - else: - spinner.succeed("Installation succeeds.") + except subprocess.CalledProcessError as e: + project.core.ui.echo( + "Installation failed: \n" + e.output.decode("utf8"), err=True + ) + sys.exit(1) + else: + project.core.ui.echo("Installation succeeds.") class RemoveCommand(BaseCommand): @@ -170,7 +166,7 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: sys.exit(1) if not ( options.yes - or click.confirm(f"Will remove: {packages_to_remove}, continue?") + or termui.confirm(f"Will remove: {packages_to_remove}, continue?") ): return pip_args = ( @@ -178,15 +174,17 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: ) project.core.ui.echo( - f"Running pip command: {pip_args}", verbosity=termui.DETAIL + f"Running pip command: {pip_args}", verbosity=termui.Verbosity.DETAIL ) - with project.core.ui.open_spinner( - f"Uninstalling plugins: {valid_packages}" - ) as spinner: - try: + try: + with project.core.ui.open_spinner( + f"Uninstalling plugins: {valid_packages}" + ): run_pip(pip_args) - except subprocess.CalledProcessError as e: - spinner.fail("Uninstallation failed: \n" + e.output.decode("utf8")) - sys.exit(1) - else: - spinner.succeed("Uninstallation succeeds.") + except subprocess.CalledProcessError as e: + project.core.ui.echo( + "Uninstallation failed: \n" + e.output.decode("utf8"), err=True + ) + sys.exit(1) + else: + project.core.ui.echo("Uninstallation succeeds.") diff --git a/pdm/cli/commands/publish/__init__.py 
b/pdm/cli/commands/publish/__init__.py new file mode 100644 index 0000000000..82eacecbef --- /dev/null +++ b/pdm/cli/commands/publish/__init__.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import argparse +import os + +import requests +from rich.progress import ( + BarColumn, + DownloadColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) + +from pdm.cli import actions +from pdm.cli.commands.base import BaseCommand +from pdm.cli.commands.publish.package import PackageFile +from pdm.cli.commands.publish.repository import Repository +from pdm.cli.hooks import HookManager +from pdm.cli.options import project_option, skip_option, verbose_option +from pdm.exceptions import PdmUsageError, PublishError +from pdm.project import Project +from pdm.termui import logger + + +class Command(BaseCommand): + """Build and publish the project to PyPI""" + + arguments = [verbose_option, project_option, skip_option] + + def add_arguments(self, parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "-r", + "--repository", + help="The repository name or url to publish the package to" + " [env var: PDM_PUBLISH_REPO]", + ) + parser.add_argument( + "-u", + "--username", + help="The username to access the repository" + " [env var: PDM_PUBLISH_USERNAME]", + ) + parser.add_argument( + "-P", + "--password", + help="The password to access the repository" + " [env var: PDM_PUBLISH_PASSWORD]", + ) + parser.add_argument( + "-S", + "--sign", + action="store_true", + help="Upload the package with PGP signature", + ) + parser.add_argument( + "-i", + "--identity", + help="GPG identity used to sign files.", + ) + parser.add_argument( + "-c", + "--comment", + help="The comment to include with the distribution file.", + ) + parser.add_argument( + "--no-build", + action="store_false", + dest="build", + help="Don't build the package before publishing", + ) + + @staticmethod + def _make_package( + filename: str, signatures: dict[str, str], options: argparse.Namespace + ) -> PackageFile: + p = PackageFile.from_filename(filename, options.comment) + if p.base_filename in signatures: + p.add_gpg_signature(signatures[p.base_filename], p.base_filename + ".asc") + elif options.sign: + p.sign(options.identity) + return p + + @staticmethod + def _check_response(response: requests.Response) -> None: + message = "" + if response.status_code == 410 and "pypi.python.org" in response.url: + message = ( + "Uploading to these sites is deprecated. " + "Try using https://upload.pypi.org/legacy/ " + "(or https://test.pypi.org/legacy/) instead." + ) + elif response.status_code == 405 and "pypi.org" in response.url: + message = ( + "It appears you're trying to upload to pypi.org but have an " + "invalid URL." 
+ ) + else: + try: + response.raise_for_status() + except requests.HTTPError as err: + message = str(err) + if message: + raise PublishError(message) + + @staticmethod + def get_repository(project: Project, options: argparse.Namespace) -> Repository: + repository = options.repository or os.getenv("PDM_PUBLISH_REPO", "pypi") + username = options.username or os.getenv("PDM_PUBLISH_USERNAME") + password = options.password or os.getenv("PDM_PUBLISH_PASSWORD") + + config = project.global_config.get_repository_config(repository) + if config is None: + raise PdmUsageError(f"Missing repository config of {repository}") + if username is not None: + config.username = username + if password is not None: + config.password = password + return Repository(project, config.url, config.username, config.password) + + def handle(self, project: Project, options: argparse.Namespace) -> None: + hooks = HookManager(project, options.skip) + + hooks.try_emit("pre_publish") + + if options.build: + actions.do_build(project, hooks=hooks) + + package_files = [ + str(p) + for p in project.root.joinpath("dist").iterdir() + if not p.name.endswith(".asc") + ] + signatures = { + p.stem: str(p) + for p in project.root.joinpath("dist").iterdir() + if p.name.endswith(".asc") + } + + repository = self.get_repository(project, options) + uploaded: list[PackageFile] = [] + with project.core.ui.make_progress( + " [progress.percentage]{task.percentage:>3.0f}%", + BarColumn(), + DownloadColumn(), + "•", + TimeRemainingColumn( + compact=True, + elapsed_when_finished=True, + ), + "•", + TransferSpeedColumn(), + ) as progress, project.core.ui.logging("publish"): + packages = sorted( + (self._make_package(p, signatures, options) for p in package_files), + # Upload wheels first if they exist. + key=lambda p: not p.base_filename.endswith(".whl"), + ) + for package in packages: + resp = repository.upload(package, progress) + logger.debug( + "Response from %s:\n%s %s", resp.url, resp.status_code, resp.reason + ) + self._check_response(resp) + uploaded.append(package) + + release_urls = repository.get_release_urls(uploaded) + if release_urls: + project.core.ui.echo("\n[green]View at:") + for url in release_urls: + project.core.ui.echo(url) + + hooks.try_emit("post_publish") diff --git a/pdm/cli/commands/publish/package.py b/pdm/cli/commands/publish/package.py new file mode 100644 index 0000000000..47aba82814 --- /dev/null +++ b/pdm/cli/commands/publish/package.py @@ -0,0 +1,206 @@ +from __future__ import annotations + +import email +import email.message +import hashlib +import os +import re +import subprocess +import tarfile +import zipfile +from dataclasses import dataclass +from typing import IO, Any, cast + +from unearth.preparer import has_leading_dir, split_leading_dir + +from pdm.exceptions import PdmUsageError, ProjectError +from pdm.termui import logger +from pdm.utils import normalize_name + +DIST_EXTENSIONS = { + ".whl": "bdist_wheel", + ".tar.bz2": "sdist", + ".tar.gz": "sdist", + ".zip": "sdist", +} +wheel_file_re = re.compile( + r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) + \.whl|\.dist-info)$""", + re.VERBOSE, +) + + +@dataclass +class PackageFile: + """A distribution file for upload. + + XXX: currently only supports sdist and wheel.
+ """ + + filename: str + metadata: email.message.Message + comment: str | None + py_version: str | None + filetype: str + + def __post_init__(self) -> None: + self.base_filename = os.path.basename(self.filename) + self.gpg_signature: tuple[str, bytes] | None = None + + def get_hashes(self) -> dict[str, str]: + hashers = {"sha256_digest": hashlib.sha256()} + try: + hashers["md5_digest"] = hashlib.md5() + except ValueError: + pass + try: + hashers["blake2_256_digest"] = hashlib.blake2b(digest_size=256 // 8) + except (TypeError, ValueError): + pass + with open(self.filename, "rb") as f: + for chunk in iter(lambda: f.read(8192), b""): + for hasher in hashers.values(): + hasher.update(chunk) + return {k: v.hexdigest() for k, v in hashers.items()} + + @classmethod + def from_filename(cls, filename: str, comment: str | None) -> PackageFile: + filetype = "" + for ext, dtype in DIST_EXTENSIONS.items(): + if filename.endswith(ext): + filetype = dtype + break + else: + raise PdmUsageError(f"Unknown distribution file type: {filename}") + if filetype == "bdist_wheel": + metadata = cls.read_metadata_from_wheel(filename) + match = wheel_file_re.match(os.path.basename(filename)) + if match is None: + py_ver = "any" + else: + py_ver = match.group("pyver") + elif filename.endswith(".zip"): + metadata = cls.read_metadata_from_zip(filename) + py_ver = "source" + else: + metadata = cls.read_metadata_from_tar(filename) + py_ver = "source" + return cls(filename, metadata, comment, py_ver, filetype) + + @staticmethod + def read_metadata_from_tar(filename: str) -> email.message.Message: + if filename.endswith(".gz"): + mode = "r:gz" + elif filename.endswith(".bz2"): + mode = "r:bz2" + else: + logger.warning(f"Can't determine the compression mode for {filename}") + mode = "r:*" + with tarfile.open(filename, mode) as tar: + members = tar.getmembers() + has_leading = has_leading_dir(m.name for m in members) + for m in members: + fn = split_leading_dir(m.name)[1] if has_leading else m.name + if fn == "PKG-INFO": + return email.message_from_binary_file( + cast(IO[bytes], tar.extractfile(m)) + ) + raise ProjectError(f"No PKG-INFO found in {filename}") + + @staticmethod + def read_metadata_from_zip(filename: str) -> email.message.Message: + with zipfile.ZipFile(filename, allowZip64=True) as zip: + filenames = zip.namelist() + has_leading = has_leading_dir(filenames) + for name in filenames: + fn = split_leading_dir(name)[1] if has_leading else name + if fn == "PKG-INFO": + return email.message_from_binary_file(zip.open(name)) + raise ProjectError(f"No PKG-INFO found in {filename}") + + @staticmethod + def read_metadata_from_wheel(filename: str) -> email.message.Message: + with zipfile.ZipFile(filename, allowZip64=True) as zip: + for fn in zip.namelist(): + if fn.replace("\\", "/").endswith(".dist-info/METADATA"): + return email.message_from_binary_file(zip.open(fn)) + raise ProjectError(f"No egg-info is found in {filename}") + + def add_gpg_signature(self, filename: str, signature_name: str) -> None: + if self.gpg_signature is not None: + raise PdmUsageError("GPG signature already added") + with open(filename, "rb") as f: + self.gpg_signature = (signature_name, f.read()) + + def sign(self, identity: str | None) -> None: + logger.info("Signing %s with gpg", self.base_filename) + gpg_args = ["gpg", "--detach-sign"] + if identity is not None: + gpg_args.extend(["--local-user", identity]) + gpg_args.extend(["-a", self.filename]) + self._run_gpg(gpg_args) + self.add_gpg_signature(self.filename + ".asc", self.base_filename + 
".asc") + + @staticmethod + def _run_gpg(gpg_args: list[str]) -> None: + try: + subprocess.run(gpg_args, check=True) + return + except FileNotFoundError: + logger.warning("gpg executable not available. Attempting fallback to gpg2.") + + gpg_args[0] = "gpg2" + try: + subprocess.run(gpg_args, check=True) + except FileNotFoundError: + raise PdmUsageError( + "'gpg' or 'gpg2' executables not available.\n" + "Try installing one of these or specifying an executable " + "with the --sign-with flag." + ) + + @property + def metadata_dict(self) -> dict[str, Any]: + meta = self.metadata + data = { + # identify release + "name": normalize_name(meta["Name"]), + "version": meta["Version"], + # file content + "filetype": self.filetype, + "pyversion": self.py_version, + # additional meta-data + "metadata_version": meta["Metadata-Version"], + "summary": meta["Summary"], + "home_page": meta["Home-page"], + "author": meta["Author"], + "author_email": meta["Author-email"], + "maintainer": meta["Maintainer"], + "maintainer_email": meta["Maintainer-email"], + "license": meta["License"], + "description": meta.get_payload(), + "keywords": meta["Keywords"], + "platform": meta.get_all("Platform") or (), + "classifiers": meta.get_all("Classifier") or [], + "download_url": meta["Download-URL"], + "supported_platform": meta.get_all("Supported-Platform") or (), + "comment": self.comment, + # Metadata 1.2 + "project_urls": meta.get_all("Project-URL") or (), + "provides_dist": meta.get_all("Provides-Dist") or (), + "obsoletes_dist": meta.get_all("Obsoletes-Dist") or (), + "requires_dist": meta.get_all("Requires-Dist") or (), + "requires_external": meta.get_all("Requires-External") or (), + "requires_python": meta.get_all("Requires-Python") or (), + # Metadata 2.1 + "provides_extras": meta.get_all("Provides-Extra") or (), + "description_content_type": meta.get("Description-Content-Type"), + # Metadata 2.2 + "dynamic": meta.get_all("Dynamic") or (), + # Hashes + **self.get_hashes(), + } + if self.gpg_signature is not None: + data["gpg_signature"] = self.gpg_signature + return data diff --git a/pdm/cli/commands/publish/repository.py b/pdm/cli/commands/publish/repository.py new file mode 100644 index 0000000000..ccaa5f64ff --- /dev/null +++ b/pdm/cli/commands/publish/repository.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +import atexit +from typing import Any, Iterable + +import requests +import requests_toolbelt +import rich.progress + +from pdm.cli.commands.publish.package import PackageFile +from pdm.models.session import PDMSession +from pdm.project import Project +from pdm.project.config import DEFAULT_REPOSITORIES + + +class Repository: + def __init__( + self, project: Project, url: str, username: str | None, password: str | None + ) -> None: + self.url = url + self.session = PDMSession(cache_dir=project.cache("http")) + self.session.auth = ( + (username or "", password or "") if username or password else None + ) + self.ui = project.core.ui + + atexit.register(self.session.close) + + @staticmethod + def _convert_to_list_of_tuples(data: dict[str, Any]) -> list[tuple[str, Any]]: + result: list[tuple[str, Any]] = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)) and key != "gpg_signature": + for item in value: + result.append((key, item)) + else: + result.append((key, value)) + return result + + def get_release_urls(self, packages: list[PackageFile]) -> Iterable[str]: + if self.url.startswith(DEFAULT_REPOSITORIES["pypi"].url.rstrip("/")): + base = "https://pypi.org/" + elif 
self.url.startswith(DEFAULT_REPOSITORIES["testpypi"].url.rstrip("/")): + base = "https://test.pypi.org/" + else: + return set() + return { + f"{base}project/{package.metadata['name']}/{package.metadata['version']}/" + for package in packages + } + + def upload( + self, package: PackageFile, progress: rich.progress.Progress + ) -> requests.Response: + payload = package.metadata_dict + payload.update( + { + ":action": "file_upload", + "protocol_version": "1", + } + ) + field_parts = self._convert_to_list_of_tuples(payload) + + progress.live.console.print(f"Uploading [green]{package.base_filename}") + + with open(package.filename, "rb") as fp: + field_parts.append( + ("content", (package.base_filename, fp, "application/octet-stream")) + ) + + def on_upload(monitor: requests_toolbelt.MultipartEncoderMonitor) -> None: + progress.update(job, completed=monitor.bytes_read) + + monitor = requests_toolbelt.MultipartEncoderMonitor.from_fields( + field_parts, callback=on_upload + ) + job = progress.add_task("", total=monitor.len) + return self.session.post( + self.url, + data=monitor, + headers={"Content-Type": monitor.content_type}, + allow_redirects=False, + ) diff --git a/pdm/cli/commands/remove.py b/pdm/cli/commands/remove.py index 8dfd65dd59..adf79cd4ae 100644 --- a/pdm/cli/commands/remove.py +++ b/pdm/cli/commands/remove.py @@ -2,7 +2,8 @@ from pdm.cli import actions from pdm.cli.commands.base import BaseCommand -from pdm.cli.options import dry_run_option, install_group, lockfile_option +from pdm.cli.hooks import HookManager +from pdm.cli.options import dry_run_option, install_group, lockfile_option, skip_option from pdm.project import Project @@ -13,6 +14,7 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None: install_group.add_to_parser(parser) dry_run_option.add_to_parser(parser) lockfile_option.add_to_parser(parser) + skip_option.add_to_parser(parser) parser.add_argument( "-d", "--dev", @@ -44,4 +46,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: no_editable=options.no_editable, no_self=options.no_self, dry_run=options.dry_run, + hooks=HookManager(project, options.skip), ) diff --git a/pdm/cli/commands/run.py b/pdm/cli/commands/run.py index 81d648bb9a..be49d0efcc 100644 --- a/pdm/cli/commands/run.py +++ b/pdm/cli/commands/run.py @@ -9,11 +9,13 @@ import sys from typing import Any, Callable, Mapping, NamedTuple, Sequence, cast -from pdm import termui -from pdm._types import TypedDict +from pdm import signals, termui from pdm.cli.actions import PEP582_PATH from pdm.cli.commands.base import BaseCommand +from pdm.cli.hooks import KNOWN_HOOKS, HookManager +from pdm.cli.options import skip_option from pdm.cli.utils import check_project_file +from pdm.compat import TypedDict from pdm.exceptions import PdmUsageError from pdm.project import Project from pdm.utils import is_path_relative_to @@ -26,6 +28,19 @@ class TaskOptions(TypedDict, total=False): site_packages: bool +def exec_opts(*options: TaskOptions | None) -> dict[str, Any]: + return dict( + env={k: v for opts in options if opts for k, v in opts.get("env", {}).items()}, + **{ + k: v + for opts in options + if opts + for k, v in opts.items() + if k not in ("env", "help") + }, + ) + + class Task(NamedTuple): kind: str name: str @@ -33,22 +48,39 @@ class Task(NamedTuple): options: TaskOptions def __str__(self) -> str: - return f"" + return f"" + + @property + def short_description(self) -> str: + """ + A short one line task description + """ + if self.kind == "composite": + fallback = f" 
{termui.Emoji.ARROW_SEPARATOR} ".join(self.args) + else: + lines = [ + line.strip() for line in str(self.args).splitlines() if line.strip() + ] + fallback = ( + f"{lines[0]}{termui.Emoji.ELLIPSIS}" if len(lines) > 1 else lines[0] + ) + return self.options.get("help", fallback) class TaskRunner: """The task runner for pdm project""" - TYPES = ["cmd", "shell", "call"] + TYPES = ["cmd", "shell", "call", "composite"] OPTIONS = ["env", "env_file", "help", "site_packages"] - def __init__(self, project: Project) -> None: + def __init__(self, project: Project, hooks: HookManager) -> None: self.project = project global_options = cast( TaskOptions, self.project.scripts.get("_", {}) if self.project.scripts else {}, ) self.global_options = global_options.copy() + self.hooks = hooks def _get_task(self, script_name: str) -> Task | None: if script_name not in self.project.scripts: @@ -113,9 +145,9 @@ def _run_process( import dotenv project.core.ui.echo( - f"Loading .env file: {termui.green(env_file)}", + f"Loading .env file: [green]{env_file}[/]", err=True, - verbosity=termui.DETAIL, + verbosity=termui.Verbosity.DETAIL, ) process_env.update( dotenv.dotenv_values(project.root / env_file, encoding="utf-8") @@ -131,9 +163,7 @@ def _run_process( expanded_command = project_env.which(command) if not expanded_command: raise PdmUsageError( - "Command {} is not found on your PATH.".format( - termui.green(f"'{command}'") - ) + f"Command [green]'{command}'[/] is not found on your PATH.".format() ) expanded_command = os.path.expanduser(os.path.expandvars(expanded_command)) expanded_args = [ @@ -163,9 +193,10 @@ def _run_process( signal.signal(signal.SIGINT, s) return process.returncode - def _run_task(self, task: Task, args: Sequence[str] = ()) -> int: + def _run_task( + self, task: Task, args: Sequence[str] = (), opts: TaskOptions | None = None + ) -> int: kind, _, value, options = task - options.pop("help", None) shell = False if kind == "cmd": if not isinstance(value, list): @@ -191,56 +222,72 @@ def _run_task(self, task: Task, args: Sequence[str] = ()) -> int: f"import sys, {module} as {short_name};" f"sys.exit({short_name}.{func})", ] + list(args) - if "env" in self.global_options: - options["env"] = {**self.global_options["env"], **options.get("env", {})} - options["env_file"] = options.get( - "env_file", self.global_options.get("env_file") - ) + elif kind == "composite": + assert isinstance(value, list) + self.project.core.ui.echo( - f"Running {task}: {termui.green(str(args))}", + f"Running {task}: [green]{str(args)}[/]", err=True, - verbosity=termui.DETAIL, + verbosity=termui.Verbosity.DETAIL, ) + if kind == "composite": + for script in value: + splitted = shlex.split(script) + cmd = splitted[0] + subargs = splitted[1:] + args # type: ignore + code = self.run(cmd, subargs, options) + if code != 0: + return code + return code return self._run_process( - args, chdir=True, shell=shell, **options # type: ignore + args, + chdir=True, + shell=shell, + **exec_opts(self.global_options, options, opts), ) - def run(self, command: str, args: Sequence[str]) -> int: + def run( + self, command: str, args: list[str], opts: TaskOptions | None = None + ) -> int: + if command in self.hooks.skip: + return 0 task = self._get_task(command) if task is not None: + self.hooks.try_emit("pre_script", script=command, args=args) pre_task = self._get_task(f"pre_{command}") - if pre_task is not None: - code = self._run_task(pre_task) + if pre_task is not None and self.hooks.should_run(pre_task.name): + code = self._run_task(pre_task, 
                 if code != 0:
                     return code
-            code = self._run_task(task, args)
+            code = self._run_task(task, args, opts=opts)
             if code != 0:
                 return code
             post_task = self._get_task(f"post_{command}")
-            if post_task is not None:
-                code = self._run_task(post_task)
+            if post_task is not None and self.hooks.should_run(post_task.name):
+                code = self._run_task(post_task, opts=opts)
+            self.hooks.try_emit("post_script", script=command, args=args)
             return code
         else:
             return self._run_process(
-                [command] + args, **self.global_options  # type: ignore
+                [command] + args,
+                **exec_opts(self.global_options, opts),
             )
 
     def show_list(self) -> None:
         if not self.project.scripts:
             return
-        columns = ["Name", "Type", "Script", "Description"]
+        columns = ["Name", "Type", "Description"]
         result = []
-        for name in self.project.scripts:
+        for name in sorted(self.project.scripts):
             if name == "_":
                 continue
             task = self._get_task(name)
             assert task is not None
             result.append(
                 (
-                    termui.green(name),
+                    f"[green]{name}[/]",
                     task.kind,
-                    str(task.args),
-                    task.options.get("help", ""),
+                    task.short_description,
                 )
             )
         self.project.core.ui.display_columns(result, columns)
@@ -250,6 +297,7 @@ class Command(BaseCommand):
     """Run commands or scripts with local packages loaded"""
 
     def add_arguments(self, parser: argparse.ArgumentParser) -> None:
+        skip_option.add_to_parser(parser)
         parser.add_argument(
             "-l",
             "--list",
@@ -271,7 +319,8 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None:
 
     def handle(self, project: Project, options: argparse.Namespace) -> None:
         check_project_file(project)
-        runner = TaskRunner(project)
+        hooks = HookManager(project, options.skip)
+        runner = TaskRunner(project, hooks=hooks)
         if options.list:
             return runner.show_list()
         if options.site_packages:
@@ -279,18 +328,21 @@ def handle(self, project: Project, options: argparse.Namespace) -> None:
         if not options.command:
             project.core.ui.echo(
                 "No command is given, default to the Python REPL.",
-                fg="yellow",
+                style="yellow",
                 err=True,
             )
             options.command = "python"
-        sys.exit(runner.run(options.command, options.args))
+        hooks.try_emit("pre_run", script=options.command, args=options.args)
+        exit_code = runner.run(options.command, options.args)
+        hooks.try_emit("post_run", script=options.command, args=options.args)
+        sys.exit(exit_code)
 
 
 def run_script_if_present(script_name: str) -> Callable:
     """Helper to create a signal handler to run specific script"""
 
-    def handler(sender: Project, **kwargs: Any) -> None:
-        runner = TaskRunner(sender)
+    def handler(sender: Project, hooks: HookManager, **kwargs: Any) -> None:
+        runner = TaskRunner(sender, hooks)
         task = runner._get_task(script_name)
         if task is None:
             return
@@ -299,3 +351,7 @@ def handler(sender: Project, **kwargs: Any) -> None:
         sys.exit(exit_code)
 
     return handler
+
+
+for hook in KNOWN_HOOKS:
+    getattr(signals, hook).connect(run_script_if_present(hook), weak=False)
diff --git a/pdm/cli/commands/search.py b/pdm/cli/commands/search.py
index eeadc8cf48..aa1b74d88f 100644
--- a/pdm/cli/commands/search.py
+++ b/pdm/cli/commands/search.py
@@ -36,12 +36,7 @@ def print_results(
         )
         current_width = len(name) + len(latest) + 4
         spaces = " " * (name_column_width - current_width)
-        line = "{name} ({latest}){spaces} - {summary}".format(
-            name=termui.green(name, bold=True),
-            latest=termui.yellow(latest),
-            spaces=spaces,
-            summary=summary,
-        )
+        line = f"[bold green]{name}[/] ([yellow]{latest}[/]){spaces} - {summary}"
         try:
             ui.echo(line)
             if normalize_name(name) in working_set:
diff --git a/pdm/cli/commands/show.py b/pdm/cli/commands/show.py
index 6540f1bfe1..878c48ac4a 100644
--- a/pdm/cli/commands/show.py
+++ b/pdm/cli/commands/show.py
@@ -2,7 +2,6 @@
 
 from packaging.version import Version
 
-from pdm import termui
 from pdm.cli.commands.base import BaseCommand
 from pdm.exceptions import PdmUsageError
 from pdm.models.candidates import Candidate
@@ -44,8 +43,9 @@ def handle(self, project: Project, options: argparse.Namespace) -> None:
             latest = next(iter(matches), None)
             if not latest:
                 project.core.ui.echo(
-                    termui.yellow(f"No match found for the package {package!r}"),
+                    f"No match found for the package {package!r}",
                     err=True,
+                    style="yellow",
                 )
                 return
             latest_stable = next(filter(filter_stable, matches), None)
diff --git a/pdm/cli/commands/sync.py b/pdm/cli/commands/sync.py
index ca90f6dc2f..598b071ef8 100644
--- a/pdm/cli/commands/sync.py
+++ b/pdm/cli/commands/sync.py
@@ -2,12 +2,14 @@
 
 from pdm.cli import actions
 from pdm.cli.commands.base import BaseCommand
+from pdm.cli.hooks import HookManager
 from pdm.cli.options import (
     clean_group,
     dry_run_option,
     groups_group,
     install_group,
     lockfile_option,
+    skip_option,
 )
 from pdm.project import Project
 
@@ -25,6 +27,7 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None:
             action="store_true",
             help="Force reinstall existing dependencies",
         )
+        skip_option.add_to_parser(parser)
         clean_group.add_to_parser(parser)
         install_group.add_to_parser(parser)
 
@@ -40,4 +43,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None:
             no_editable=options.no_editable,
             no_self=options.no_self,
             reinstall=options.reinstall,
+            hooks=HookManager(project, options.skip),
         )
diff --git a/pdm/cli/commands/update.py b/pdm/cli/commands/update.py
index 0a73aaf965..5ba03909fc 100644
--- a/pdm/cli/commands/update.py
+++ b/pdm/cli/commands/update.py
@@ -2,12 +2,14 @@
 
 from pdm.cli import actions
 from pdm.cli.commands.base import BaseCommand
+from pdm.cli.hooks import HookManager
 from pdm.cli.options import (
     groups_group,
     install_group,
     lockfile_option,
     prerelease_option,
     save_strategy_group,
+    skip_option,
     unconstrained_option,
     update_strategy_group,
 )
@@ -25,6 +27,7 @@ class Command(BaseCommand):
         update_strategy_group,
         prerelease_option,
         unconstrained_option,
+        skip_option,
     ]
 
     def add_arguments(self, parser: argparse.ArgumentParser) -> None:
@@ -69,4 +72,5 @@ def handle(self, project: Project, options: argparse.Namespace) -> None:
             no_editable=options.no_editable,
             no_self=options.no_self,
             prerelease=options.prerelease,
+            hooks=HookManager(project, options.skip),
         )
diff --git a/pdm/cli/commands/use.py b/pdm/cli/commands/use.py
index c889ab1504..0d8c210963 100644
--- a/pdm/cli/commands/use.py
+++ b/pdm/cli/commands/use.py
@@ -2,6 +2,8 @@
 
 from pdm.cli import actions
 from pdm.cli.commands.base import BaseCommand
+from pdm.cli.hooks import HookManager
+from pdm.cli.options import skip_option
 from pdm.project import Project
 
 
@@ -9,6 +11,7 @@ class Command(BaseCommand):
     """Use the given python version or path as base interpreter"""
 
     def add_arguments(self, parser: argparse.ArgumentParser) -> None:
+        skip_option.add_to_parser(parser)
         parser.add_argument(
             "-f",
             "--first",
@@ -27,5 +30,9 @@ def add_arguments(self, parser: argparse.ArgumentParser) -> None:
 
     def handle(self, project: Project, options: argparse.Namespace) -> None:
         actions.do_use(
-            project, options.python, options.first, options.ignore_remembered
+            project,
+            python=options.python,
+            first=options.first,
+            ignore_remembered=options.ignore_remembered,
+            hooks=HookManager(project, options.skip),
         )
diff --git a/pdm/cli/completions/pdm.bash b/pdm/cli/completions/pdm.bash
index cbd0a6cb1c..6063d0f6aa 100644
--- a/pdm/cli/completions/pdm.bash
+++ b/pdm/cli/completions/pdm.bash
@@ -1,7 +1,7 @@
 # BASH completion script for pdm
 # Generated by pycomplete 0.3.2
-_pdm_a919b69078acdf0a_complete()
+_pdm_25182a7ef85b840e_complete()
 {
     local cur script coms opts com
     COMPREPLY=()
@@ -29,11 +29,11 @@ _pdm_a919b69078acdf0a_complete()
             case "$com" in
 
             (add)
-            opts="--dev --dry-run --editable --global --group --help --lockfile --no-editable --no-isolation --no-self --no-sync --prerelease --project --save-compatible --save-exact --save-minimum --save-wildcard --unconstrained --update-all --update-eager --update-reuse --verbose"
+            opts="--dev --dry-run --editable --global --group --help --lockfile --no-editable --no-isolation --no-self --no-sync --prerelease --project --save-compatible --save-exact --save-minimum --save-wildcard --skip --unconstrained --update-all --update-eager --update-reuse --verbose"
            ;;

            (build)
-            opts="--config-setting --dest --help --no-clean --no-isolation --no-sdist --no-wheel --project --verbose"
+            opts="--config-setting --dest --help --no-clean --no-isolation --no-sdist --no-wheel --project --skip --verbose"
            ;;

            (cache)
@@ -61,11 +61,11 @@ _pdm_a919b69078acdf0a_complete()
            ;;

            (init)
-            opts="--global --help --non-interactive --project --verbose"
+            opts="--global --help --non-interactive --project --skip --verbose"
            ;;

            (install)
-            opts="--check --dev --dry-run --global --group --help --lockfile --no-default --no-editable --no-isolation --no-lock --no-self --production --project --verbose"
+            opts="--check --dev --dry-run --global --group --help --lockfile --no-default --no-editable --no-isolation --no-lock --no-self --production --project --skip --verbose"
            ;;

            (list)
@@ -73,19 +73,23 @@ _pdm_a919b69078acdf0a_complete()
            ;;

            (lock)
-            opts="--global --help --lockfile --no-isolation --project --refresh --verbose"
+            opts="--global --help --lockfile --no-isolation --project --refresh --skip --verbose"
            ;;

            (plugin)
            opts="--help --verbose"
            ;;

+            (publish)
+            opts="--comment --help --identity --no-build --password --project --repository --sign --skip --username --verbose"
+            ;;
+
            (remove)
-            opts="--dev --dry-run --global --group --help --lockfile --no-editable --no-isolation --no-self --no-sync --project --verbose"
+            opts="--dev --dry-run --global --group --help --lockfile --no-editable --no-isolation --no-self --no-sync --project --skip --verbose"
            ;;

            (run)
-            opts="--global --help --list --project --site-packages --verbose"
+            opts="--global --help --list --project --site-packages --skip --verbose"
            ;;

            (search)
@@ -97,11 +101,11 @@ _pdm_a919b69078acdf0a_complete()
            ;;

            (sync)
-            opts="--clean --dev --dry-run --global --group --help --lockfile --no-clean --no-default --no-editable --no-isolation --no-self --production --project --reinstall --verbose"
+            opts="--clean --dev --dry-run --global --group --help --lockfile --no-clean --no-default --no-editable --no-isolation --no-self --production --project --reinstall --skip --verbose"
            ;;

            (update)
-            opts="--dev --global --group --help --lockfile --no-default --no-editable --no-isolation --no-self --no-sync --outdated --prerelease --production --project --save-compatible --save-exact --save-minimum --save-wildcard --top --unconstrained --update-all --update-eager --update-reuse --verbose"
+            opts="--dev --global --group --help --lockfile --no-default --no-editable --no-isolation --no-self --no-sync --outdated --prerelease --production --project --save-compatible --save-exact --save-minimum --save-wildcard --skip --top --unconstrained --update-all --update-eager --update-reuse --verbose"
            ;;

            (use)
@@ -118,7 +122,7 @@ _pdm_a919b69078acdf0a_complete()

            # completing for a command
            if [[ $cur == $com ]]; then
-                coms="add build cache completion config export import info init install list lock plugin remove run search show sync update use"
+                coms="add build cache completion config export import info init install list lock plugin publish remove run search show sync update use"

                COMPREPLY=($(compgen -W "${coms}" -- ${cur}))
                __ltrim_colon_completions "$cur"
@@ -127,4 +131,4 @@ _pdm_a919b69078acdf0a_complete()
    fi
 }

-complete -o default -F _pdm_a919b69078acdf0a_complete pdm
+complete -o default -F _pdm_25182a7ef85b840e_complete pdm
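All four completion scripts in this patch advertise the same `--skip` behaviour: a repeatable option whose comma-separated values are merged into a single list, with the special `:all`, `:pre` and `:post` markers. As a minimal sketch of that merging (the `CommaSplitAction` class is hypothetical, not pdm's actual option machinery):

```python
import argparse


class CommaSplitAction(argparse.Action):
    """Hypothetical action: flatten repeatable, comma-separated values into one list."""

    def __call__(self, parser, namespace, values, option_string=None):
        collected = getattr(namespace, self.dest) or []
        collected.extend(part.strip() for part in values.split(",") if part.strip())
        setattr(namespace, self.dest, collected)


parser = argparse.ArgumentParser()
parser.add_argument("-k", "--skip", action=CommaSplitAction, default=[])

# Something like `pdm build -k pre_build,post_build --skip :post` would then yield:
print(parser.parse_args(["-k", "pre_build,post_build", "--skip", ":post"]).skip)
# ['pre_build', 'post_build', ':post']
```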
diff --git a/pdm/cli/completions/pdm.fish b/pdm/cli/completions/pdm.fish
index 2493177ec6..48dd4e61e8 100644
--- a/pdm/cli/completions/pdm.fish
+++ b/pdm/cli/completions/pdm.fish
@@ -1,9 +1,9 @@
 # FISH completion script for pdm
 # Generated by pycomplete 0.3.2
-function __fish_pdm_a919b69078acdf0a_complete_no_subcommand
+function __fish_pdm_7426f3abf02b4bb8_complete_no_subcommand
    for i in (commandline -opc)
-        if contains -- $i add build cache completion config export import info init install list lock plugin remove run search show sync update use
+        if contains -- $i add build cache completion config export import info init install list lock plugin publish remove run search show sync update use
            return 1
        end
    end
@@ -11,34 +11,35 @@ function __fish_pdm_a919b69078acdf0a_complete_no_subcommand
 end

 # global options
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l config -d 'Specify another config file path(env var: PDM_CONFIG_FILE)'
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l help -d 'show this help message and exit'
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l ignore-python -d 'Ignore the Python path saved in the pdm.toml config'
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l pep582 -d 'Print the command line to be eval\'d by the shell'
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l verbose -d '-v for detailed output and -vv for more detailed'
-complete -c pdm -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -l version -d 'Show version'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l config -d 'Specify another config file path(env var: PDM_CONFIG_FILE)'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l help -d 'show this help message and exit'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l ignore-python -d 'Ignore the Python path saved in the pdm.toml config'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l pep582 -d 'Print the command line to be eval\'d by the shell'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l verbose -d '-v for detailed output and -vv for more detailed'
+complete -c pdm -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -l version -d 'Show version'

 # commands
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a add -d 'Add package(s) to pyproject.toml and install them'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a build -d 'Build artifacts for distribution'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a cache -d 'Control the caches of PDM'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a completion -d 'Generate completion scripts for the given shell'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a config -d 'Display the current configuration'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a export -d 'Export the locked packages set to other formats'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a import -d 'Import project metadata from other formats'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a info -d 'Show the project information'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a init -d 'Initialize a pyproject.toml for PDM'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a install -d 'Install dependencies from lock file'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a list -d 'List packages installed in the current working set'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a lock -d 'Resolve and lock dependencies'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a plugin -d 'Manage the PDM plugins'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a remove -d 'Remove packages from pyproject.toml'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a run -d 'Run commands or scripts with local packages loaded'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a search -d 'Search for PyPI packages'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a show -d 'Show the package information'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a sync -d 'Synchronize the current working set with lock file'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a update -d 'Update package(s) in pyproject.toml'
-complete -c pdm -f -n '__fish_pdm_a919b69078acdf0a_complete_no_subcommand' -a use -d 'Use the given python version or path as base interpreter'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a add -d 'Add package(s) to pyproject.toml and install them'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a build -d 'Build artifacts for distribution'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a cache -d 'Control the caches of PDM'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a completion -d 'Generate completion scripts for the given shell'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a config -d 'Display the current configuration'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a export -d 'Export the locked packages set to other formats'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a import -d 'Import project metadata from other formats'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a info -d 'Show the project information'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a init -d 'Initialize a pyproject.toml for PDM'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a install -d 'Install dependencies from lock file'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a list -d 'List packages installed in the current working set'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a lock -d 'Resolve and lock dependencies'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a plugin -d 'Manage the PDM plugins'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a publish -d 'Build and publish the project to PyPI'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a remove -d 'Remove packages from pyproject.toml'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a run -d 'Run commands or scripts with local packages loaded'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a search -d 'Search for PyPI packages'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a show -d 'Show the package information'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a sync -d 'Synchronize the current working set with lock file'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a update -d 'Update package(s) in pyproject.toml'
+complete -c pdm -f -n '__fish_pdm_7426f3abf02b4bb8_complete_no_subcommand' -a use -d 'Use the given python version or path as base interpreter'

 # command options
@@ -49,7 +50,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from add' -l editable -d 'Specify
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l group -d 'Specify the target dependency group to add into'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from add' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from add' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l no-editable -d 'Install non-editable versions for all packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l no-isolation -d 'Do not isolate the build in a clean environment'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l no-self -d 'Don\'t install the project itself'
@@ -60,8 +61,9 @@ complete -c pdm -A -n '__fish_seen_subcommand_from add' -l save-compatible -d 'S
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l save-exact -d 'Save exact version specifiers'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l save-minimum -d 'Save minimum version specifiers'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l save-wildcard -d 'Save wildcard version specifiers'
+complete -c pdm -A -n '__fish_seen_subcommand_from add' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l unconstrained -d 'Ignore the version constraint of packages'
-complete -c pdm -A -n '__fish_seen_subcommand_from add' -l update-all -d 'Update all dependencies and sub depenencies'
+complete -c pdm -A -n '__fish_seen_subcommand_from add' -l update-all -d 'Update all dependencies and sub-dependencies'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l update-eager -d 'Try to update the packages and their dependencies recursively'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l update-reuse -d 'Reuse pinned versions already present in lock file if possible'
 complete -c pdm -A -n '__fish_seen_subcommand_from add' -l verbose -d '-v for detailed output and -vv for more detailed'
@@ -75,6 +77,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from build' -l no-isolation -d 'Do
 complete -c pdm -A -n '__fish_seen_subcommand_from build' -l no-sdist -d 'Don\'t build source tarballs'
 complete -c pdm -A -n '__fish_seen_subcommand_from build' -l no-wheel -d 'Don\'t build wheels'
 complete -c pdm -A -n '__fish_seen_subcommand_from build' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
+complete -c pdm -A -n '__fish_seen_subcommand_from build' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from build' -l verbose -d '-v for detailed output and -vv for more detailed'

 # cache
@@ -98,7 +101,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from export' -l format -d 'Specify
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l group -d 'Select group of optional-dependencies or dev-dependencies(with -d). Can be supplied multiple times, use ":all" to include all groups under the same species.'
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from export' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from export' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l no-default -d 'Don\'t include dependencies from the default group'
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l output -d 'Write output to the given file, or print to stdout if not given'
 complete -c pdm -A -n '__fish_seen_subcommand_from export' -l production -d 'Unselect dev dependencies'
@@ -131,6 +134,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from init' -l global -d 'Use the g
 complete -c pdm -A -n '__fish_seen_subcommand_from init' -l help -d 'show this help message and exit'
 complete -c pdm -A -n '__fish_seen_subcommand_from init' -l non-interactive -d 'Don\'t ask questions but use default values'
 complete -c pdm -A -n '__fish_seen_subcommand_from init' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
+complete -c pdm -A -n '__fish_seen_subcommand_from init' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from init' -l verbose -d '-v for detailed output and -vv for more detailed'

 # install
@@ -140,7 +144,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from install' -l dry-run -d 'Show
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l group -d 'Select group of optional-dependencies or dev-dependencies(with -d). Can be supplied multiple times, use ":all" to include all groups under the same species.'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from install' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from install' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l no-default -d 'Don\'t include dependencies from the default group'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l no-editable -d 'Install non-editable versions for all packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l no-isolation -d 'Do not isolate the build in a clean environment'
@@ -148,6 +152,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from install' -l no-lock -d 'Don\'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l no-self -d 'Don\'t install the project itself'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l production -d 'Unselect dev dependencies'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
+complete -c pdm -A -n '__fish_seen_subcommand_from install' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from install' -l verbose -d '-v for detailed output and -vv for more detailed'

 # list
@@ -163,28 +168,43 @@ complete -c pdm -A -n '__fish_seen_subcommand_from list' -l verbose -d '-v for d

 # lock
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l no-isolation -d 'Do not isolate the build in a clean environment'
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l refresh -d 'Don\'t update pinned versions, only refresh the lock file'
+complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from lock' -l verbose -d '-v for detailed output and -vv for more detailed'

 # plugin
 complete -c pdm -A -n '__fish_seen_subcommand_from plugin' -l help -d 'show this help message and exit'
 complete -c pdm -A -n '__fish_seen_subcommand_from plugin' -l verbose -d '-v for detailed output and -vv for more detailed'

+# publish
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l comment -d 'The comment to include with the distribution file.'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l help -d 'show this help message and exit'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l identity -d 'GPG identity used to sign files.'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l no-build -d 'Don\'t build the package before publishing'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l password -d 'The password to access the repository [env var: PDM_PUBLISH_PASSWORD]'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l repository -d 'The repository name or url to publish the package to [env var: PDM_PUBLISH_REPO]'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l sign -d 'Upload the package with PGP signature'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l username -d 'The username to access the repository [env var: PDM_PUBLISH_USERNAME]'
+complete -c pdm -A -n '__fish_seen_subcommand_from publish' -l verbose -d '-v for detailed output and -vv for more detailed'
+
 # remove
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l dev -d 'Remove packages from dev dependencies'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l dry-run -d 'Show the difference only and don\'t perform any action'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l group -d 'Specify the target dependency group to remove from'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l no-editable -d 'Install non-editable versions for all packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l no-isolation -d 'Do not isolate the build in a clean environment'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l no-self -d 'Don\'t install the project itself'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l no-sync -d 'Only write pyproject.toml and do not uninstall packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
+complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from remove' -l verbose -d '-v for detailed output and -vv for more detailed'

 # run
@@ -193,6 +213,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from run' -l help -d 'show this he
 complete -c pdm -A -n '__fish_seen_subcommand_from run' -l list -d 'Show all available scripts defined in pyproject.toml'
 complete -c pdm -A -n '__fish_seen_subcommand_from run' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
 complete -c pdm -A -n '__fish_seen_subcommand_from run' -l site-packages -d 'Load site-packages from the selected interpreter'
+complete -c pdm -A -n '__fish_seen_subcommand_from run' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from run' -l verbose -d '-v for detailed output and -vv for more detailed'

 # search
@@ -218,7 +239,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l dry-run -d 'Show the
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l group -d 'Select group of optional-dependencies or dev-dependencies(with -d). Can be supplied multiple times, use ":all" to include all groups under the same species.'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l no-clean -d 'don\'t clean unused packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l no-default -d 'Don\'t include dependencies from the default group'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l no-editable -d 'Install non-editable versions for all packages'
@@ -227,6 +248,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l no-self -d 'Don\'t i
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l production -d 'Unselect dev dependencies'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l project -d 'Specify another path as the project root, which changes the base of pyproject.toml and __pypackages__'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l reinstall -d 'Force reinstall existing dependencies'
+complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from sync' -l verbose -d '-v for detailed output and -vv for more detailed'

 # update
@@ -234,7 +256,7 @@ complete -c pdm -A -n '__fish_seen_subcommand_from update' -l dev -d 'Select dev
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l global -d 'Use the global project, supply the project root with `-p` option'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l group -d 'Select group of optional-dependencies or dev-dependencies(with -d). Can be supplied multiple times, use ":all" to include all groups under the same species.'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l help -d 'show this help message and exit'
-complete -c pdm -A -n '__fish_seen_subcommand_from update' -l lockfile -d 'Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock'
+complete -c pdm -A -n '__fish_seen_subcommand_from update' -l lockfile -d 'Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l no-default -d 'Don\'t include dependencies from the default group'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l no-editable -d 'Install non-editable versions for all packages'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l no-isolation -d 'Do not isolate the build in a clean environment'
@@ -248,9 +270,10 @@ complete -c pdm -A -n '__fish_seen_subcommand_from update' -l save-compatible -d
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l save-exact -d 'Save exact version specifiers'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l save-minimum -d 'Save minimum version specifiers'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l save-wildcard -d 'Save wildcard version specifiers'
+complete -c pdm -A -n '__fish_seen_subcommand_from update' -l skip -d 'Skip some tasks and/or hooks by their comma-separated names. Can be supplied multiple times. Use ":all" to skip all hooks. Use ":pre" and ":post" to skip all pre or post hooks.'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l top -d 'Only update those list in pyproject.toml'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l unconstrained -d 'Ignore the version constraint of packages'
-complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-all -d 'Update all dependencies and sub depenencies'
+complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-all -d 'Update all dependencies and sub-dependencies'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-eager -d 'Try to update the packages and their dependencies recursively'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-reuse -d 'Reuse pinned versions already present in lock file if possible'
 complete -c pdm -A -n '__fish_seen_subcommand_from update' -l verbose -d '-v for detailed output and -vv for more detailed'
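The rewritten help strings above standardize on an `[env var: …]` suffix (`PDM_LOCKFILE`, `PDM_PUBLISH_USERNAME`, `PDM_PUBLISH_PASSWORD`, `PDM_PUBLISH_REPO`). A minimal sketch of how such an environment-variable fallback is commonly wired into argparse, assuming nothing about pdm's real option classes:

```python
import argparse
import os

parser = argparse.ArgumentParser(prog="pdm publish")
parser.add_argument(
    "-u",
    "--username",
    # Fall back to the environment when the flag is omitted, as the help advertises.
    default=os.environ.get("PDM_PUBLISH_USERNAME"),
    help="The username to access the repository [env var: PDM_PUBLISH_USERNAME]",
)

args = parser.parse_args([])
print(args.username)  # value of PDM_PUBLISH_USERNAME, or None when unset
```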
complete -c pdm -A -n '__fish_seen_subcommand_from update' -l top -d 'Only update those list in pyproject.toml' complete -c pdm -A -n '__fish_seen_subcommand_from update' -l unconstrained -d 'Ignore the version constraint of packages' -complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-all -d 'Update all dependencies and sub depenencies' +complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-all -d 'Update all dependencies and sub-depenencies' complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-eager -d 'Try to update the packages and their dependencies recursively' complete -c pdm -A -n '__fish_seen_subcommand_from update' -l update-reuse -d 'Reuse pinned versions already present in lock file if possible' complete -c pdm -A -n '__fish_seen_subcommand_from update' -l verbose -d '-v for detailed output and -vv for more detailed' diff --git a/pdm/cli/completions/pdm.ps1 b/pdm/cli/completions/pdm.ps1 index db98405943..44b90bd8b7 100644 --- a/pdm/cli/completions/pdm.ps1 +++ b/pdm/cli/completions/pdm.ps1 @@ -6,6 +6,7 @@ if ((Test-Path Function:\TabExpansion) -and -not (Test-Path Function:\_pdm_compl $PDM_PYTHON = "%{python_executable}" $PDM_PIP_INDEX = (& $PDM_PYTHON -m pdm config pypi.url).Trim() +$CONFIG_DIR = "$env:LOCALAPPDATA\pdm" class Option { [string[]] $Opts @@ -110,14 +111,14 @@ function getSections() { } function _fetchPackageListFromPyPI() { - if (-not (Test-Path -Path "~/.pdm")) { - mkdir "~/.pdm" + if (-not (Test-Path -Path $CONFIG_DIR)) { + mkdir $CONFIG_DIR } - (Invoke-WebRequest $PDM_PIP_INDEX).Links | ForEach-Object { $_.innerText } | Out-File -FilePath "~/.pdm/.pypiPackages" + (Invoke-WebRequest $PDM_PIP_INDEX).Links | ForEach-Object { $_.innerText } | Out-File -FilePath "$CONFIG_DIR\.pypiPackages" } function getPyPIPackages() { - # $cacheFile = "~/.pdm/.pypiPackages" + # $cacheFile = "$CONFIG_DIR\.pypiPackages" # if (-not (Test-Path -Path $cacheFile) -or (Get-Item $cacheFile).LastWriteTime -lt (Get-Date).AddDays(-28)) { # _fetchPackageListFromPyPI # } @@ -125,13 +126,19 @@ function getPyPIPackages() { } function getPdmPackages() { - & $PDM_PYTHON -c "import os, re, tomli + & $PDM_PYTHON -c " +import sys +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib +import os, re PACKAGE_REGEX = re.compile(r'^[A-Za-z][A-Za-z0-9._-]*') def get_packages(lines): return [PACKAGE_REGEX.match(line).group() for line in lines] with open('pyproject.toml', 'rb') as f: - data = tomli.load(f) + data = tomllib.load(f) packages = get_packages(data.get('project', {}).get('dependencies', [])) for reqs in data.get('project', {}).get('optional-dependencies', {}).values(): packages.extend(get_packages(reqs)) @@ -185,13 +192,14 @@ function TabExpansion($line, $lastWord) { if ($lastBlock -match "^pdm ") { [string[]]$words = $lastBlock.Split()[1..$lastBlock.Length] - [string[]]$AllCommands = ("add", "build", "cache", "completion", "config", "export", "import", "info", "init", "install", "list", "lock", "plugin", "remove", "run", "search", "show", "sync", "update", "use") + [string[]]$AllCommands = ("add", "build", "cache", "completion", "config", "export", "import", "info", "init", "install", "list", "lock", "plugin", "publish", "remove", "run", "search", "show", "sync", "update", "use") [string[]]$commands = $words.Where( { $_ -notlike "-*" }) $command = $commands[0] $completer = [Completer]::new() $completer.AddOpts(([Option]::new(("-h", "--help", "-v", "--verbose")))) $sectionOption = [Option]::new(@("-G", 
"--group")).WithValues(@(getSections)) $projectOption = [Option]::new(@("-p", "--project")).WithValues(@()) + $skipOption = [Option]::new(@("-k", "--skip")).WithValues(@()) $formatOption = [Option]::new(@("-f", "--format")).WithValues(@("setuppy", "requirements", "poetry", "flit")) Switch ($command) { @@ -201,12 +209,13 @@ function TabExpansion($line, $lastWord) { [Option]::new(("-d", "--dev", "--save-compatible", "--save-wildcard", "--dry-run", "--save-exact", "--save-minimum", "--update-eager", "--update-reuse", "--update-all", "-g", "--global", "--no-sync", "--no-editable", "--no-self", "-u", "--unconstrained", "--no-isolation", "--pre", "--prerelease", "-L", "--lockfile")), $sectionOption, $projectOption, + $skipOption [Option]::new(@("-e", "--editable")).WithValues(@(getPyPIPackages)) )) $completer.AddParams(@(getPyPIPackages), $true) break } - "build" { $completer.AddOpts(@([Option]::new(@("-d", "--dest", "--no-clean", "--no-sdist", "--no-wheel", "-C", "--config-setting", "--no-isolation")), $projectOption)) } + "build" { $completer.AddOpts(@([Option]::new(@("-d", "--dest", "--no-clean", "--no-sdist", "--no-wheel", "-C", "--config-setting", "--no-isolation")), $projectOption, $skipOption)) } "cache" { $subCommand = $commands[1] switch ($subCommand) { @@ -258,7 +267,8 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts( @( [Option]::new(@("-g", "--global", "--non-interactive", "-n")), - $projectOption + $projectOption, + $skipOption )) break } @@ -266,6 +276,7 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts(@( [Option]::new(("-d", "--dev", "-g", "--global", "--dry-run", "--no-default", "--no-lock", "--prod", "--production", "--no-editable", "--no-self", "--no-isolation", "--check", "-L", "--lockfile")), $sectionOption, + $skipOption, $projectOption )) break @@ -282,6 +293,7 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts( @( [Option]::new(@("--global", "-g", "--no-isolation", "--refresh", "-L", "--lockfile")), + $skipOption, $projectOption )) break @@ -307,11 +319,21 @@ function TabExpansion($line, $lastWord) { } break } + "publish" { + $completer.AddOpts( + @( + [Option]::new(@("-r", "--repository", "-u", "--username", "-P", "--password", "-S", "--sign", "-i", "--identity", "-c", "--comment", "--no-build")), + $skipOption, + $projectOption + )) + break + } "remove" { $completer.AddOpts( @( [Option]::new(@("--global", "-g", "--dev", "-d", "--dry-run", "--no-sync", "--no-editable", "--no-self", "--no-isolation", "-L", "--lockfile")), $projectOption, + $skipOption, $sectionOption )) $completer.AddParams(@(getPdmPackages), $true) @@ -321,6 +343,7 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts( @( [Option]::new(@("--global", "-g", "-l", "--list", "-s", "--site-packages")), + $skipOption, $projectOption )) $completer.AddParams(@(getScripts), $false) @@ -339,6 +362,7 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts(@( [Option]::new(("-d", "--dev", "-g", "--global", "--no-default", "--clean", "--no-clean", "--dry-run", "-r", "--reinstall", "--prod", "--production", "--no-editable", "--no-self", "--no-isolation", "-L", "--lockfile")), $sectionOption, + $skipOption, $projectOption )) break @@ -347,6 +371,7 @@ function TabExpansion($line, $lastWord) { $completer.AddOpts(@( [Option]::new(("-d", "--dev", "--save-compatible", "--prod", "--production", "--save-wildcard", "--save-exact", "--save-minimum", "--update-eager", "--update-reuse", "--update-all", "-g", "--global", "--dry-run", "--outdated", "--top", "-u", 
"--unconstrained", "--no-editable", "--no-self", "--no-isolation", "--no-sync", "--pre", "--prerelease", "-L", "--lockfile")), $sectionOption, + $skipOption, $projectOption )) $completer.AddParams(@(getPdmPackages), $true) diff --git a/pdm/cli/completions/pdm.zsh b/pdm/cli/completions/pdm.zsh index 4f0787bf1a..5521063cdb 100644 --- a/pdm/cli/completions/pdm.zsh +++ b/pdm/cli/completions/pdm.zsh @@ -28,6 +28,7 @@ _pdm() { 'list:List packages installed in the current working set' 'lock:Resolve and lock dependencies' 'plugin:Manage the PDM plugins' + 'publish:Build and publish the project to PyPI' 'remove:Remove packages from pyproject.toml' 'run:Run commands or scripts with local packages loaded' 'search:Search for PyPI packages' @@ -70,6 +71,7 @@ _pdm() { '--update-all[Update all dependencies and sub-dependencies]' '--no-editable[Install non-editable versions for all packages]' "--no-self[Don't install the project itself]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' {-u,--unconstrained}'[Ignore the version constraint of packages]' {--pre,--prerelease}'[Allow prereleases to be pinned]' {-e+,--editable+}'[Specify editable packages]:packages' @@ -83,6 +85,7 @@ _pdm() { "--no-sdist[Don't build source tarballs]" "--no-wheel[Don't build wheels]" {-d+,--dest+}'[Target directory to put artifacts]:directory:_files -/' + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' '--no-clean[Do not clean the target directory]' {-C,--config-setting}'[Pass options to the backend. options with a value must be specified after "=": "--config-setting=--opt(=value)" or "-C--opt(=value)"]' "--no-isolation[do not isolate the build in a clean environment]" @@ -106,7 +109,7 @@ _pdm() { args) case $words[1] in clear) - compadd -X type 'hashes' 'http' 'wheels' 'metadata' && ret=0 + compadd -X type 'hashes' 'http' 'wheels' 'metadata' 'packages' && ret=0 ;; *) _message "pattern" && ret=0 @@ -166,6 +169,7 @@ _pdm() { arguments+=( {-g,--global}'[Use the global project, supply the project root with `-p` option]' {-n,--non-interactive}"[Don't ask questions but use default values]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' ) ;; install) @@ -175,6 +179,7 @@ _pdm() { {-d,--dev}"[Select dev dependencies]" {-L,--lockfile}'[Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock]:lockfile:_files' {--prod,--production}"[Unselect dev dependencies]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' "--no-lock[Don't do lock if lock file is not found or outdated]" "--no-default[Don\'t include dependencies from the default group]" '--no-editable[Install non-editable versions for all packages]' @@ -198,6 +203,7 @@ _pdm() { {-g,--global}'[Use the global project, supply the project root with `-p` option]' {-L,--lockfile}'[Specify another lockfile path, or use `PDM_LOCKFILE` env variable. 
Default: pdm.lock]:lockfile:_files' "--no-isolation[Do not isolate the build in a clean environment]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' "--refresh[Don't update pinned versions, only refresh the lock file]" ) ;; @@ -237,12 +243,25 @@ _pdm() { esac return $ret ;; + publish) + arguments+=( + {-r,--repository}'[The repository name or url to publish the package to }[env var: PDM_PUBLISH_REPO]]:repository:' + {-u,--username}'[The username to access the repository [env var: PDM_PUBLISH_USERNAME]]:username:' + {-P,--password}'[The password to access the repository [env var: PDM_PUBLISH_PASSWORD]]:password:' + {-S,--sign}'[Upload the package with PGP signature]' + {-i,--identity}'[GPG identity used to sign files.]:gpg identity:' + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' + {-c,--comment}'[The comment to include with the distribution file.]:comment:' + "--no-build[Don't build the package before publishing]" + ) + ;; remove) arguments+=( {-g,--global}'[Use the global project, supply the project root with `-p` option]' {-G,--group}'[Specify the target dependency group to remove from]:group:_pdm_groups' {-d,--dev}"[Remove packages from dev dependencies]" {-L,--lockfile}'[Specify another lockfile path, or use `PDM_LOCKFILE` env variable. Default: pdm.lock]:lockfile:_files' + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' "--no-sync[Only write pyproject.toml and do not uninstall packages]" '--no-editable[Install non-editable versions for all packages]' "--no-self[Don't install the project itself]" @@ -255,6 +274,7 @@ _pdm() { _arguments -s \ {-g,--global}'[Use the global project, supply the project root with `-p` option]' \ {-l,--list}'[Show all available scripts defined in pyproject.toml]' \ + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' \ {-s,--site-packages}'[Load site-packages from the selected interpreter]' \ '(-)1:command:->command' \ '*:arguments: _normal ' && return 0 @@ -289,6 +309,7 @@ _pdm() { {-d,--dev}"[Select dev dependencies]" {-L,--lockfile}'[Specify another lockfile path, or use `PDM_LOCKFILE` env variable. 
Default: pdm.lock]:lockfile:_files' {--prod,--production}"[Unselect dev dependencies]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' '--dry-run[Only prints actions without actually running them]' {-r,--reinstall}"[Force reinstall existing dependencies]" '--clean[Clean unused packages]' @@ -314,6 +335,7 @@ _pdm() { '--no-editable[Install non-editable versions for all packages]' "--no-self[Don't install the project itself]" "--no-sync[Only update lock file but do not sync packages]" + {-k,--skip}'[Skip some tasks and/or hooks by their comma-separated names]' {-u,--unconstrained}'[Ignore the version constraint of packages]' {--pre,--prerelease}'[Allow prereleases to be pinned]' {-d,--dev}'[Select dev dependencies]' @@ -363,13 +385,18 @@ _pdm_groups() { _get_packages_with_python() { command ${PDM_PYTHON} - << EOF -import os, re, tomli +import sys +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib +import os, re PACKAGE_REGEX = re.compile(r'^[A-Za-z][A-Za-z0-9._-]*') def get_packages(lines): return [PACKAGE_REGEX.match(line).group() for line in lines] with open('pyproject.toml', 'rb') as f: - data = tomli.load(f) + data = tomllib.load(f) packages = get_packages(data.get('project', {}).get('dependencies', [])) for reqs in data.get('project', {}).get('optional-dependencies', {}).values(): packages.extend(get_packages(reqs)) diff --git a/pdm/cli/hooks.py b/pdm/cli/hooks.py new file mode 100644 index 0000000000..7189b49d3f --- /dev/null +++ b/pdm/cli/hooks.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from typing import Any + +from blinker import Signal + +from pdm import signals + +from pdm.project.core import Project +from pdm.utils import cached_property + +KNOWN_HOOKS = tuple( + name for name, obj in signals.__dict__.items() if isinstance(obj, Signal) +) + + +class HookManager: + project: Project + skip: list[str] + + def __init__(self, project: Project, skip: list[str] | None = None): + self.project = project + self.skip = skip or [] + + @cached_property + def skip_all(self) -> bool: + return ":all" in self.skip + + @cached_property + def skip_pre(self) -> bool: + return ":pre" in self.skip + + @cached_property + def skip_post(self) -> bool: + return ":post" in self.skip + + def should_run(self, name: str) -> bool: + """ + Tells whether a task given its name should run or not + according to the current skipping rules. + """ + return ( + not self.skip_all + and name not in self.skip + and not (self.skip_pre and name.startswith("pre_")) + and not (self.skip_post and name.startswith("post_")) + ) + + def try_emit(self, name: str, **kwargs: Any) -> None: + """ + Emit a hook signal if rules allow it.
+ """ + if self.should_run(name): + getattr(signals, name).send(self.project, hooks=self, **kwargs) diff --git a/pdm/cli/options.py b/pdm/cli/options.py index fb65dd82c5..e3bf36827f 100644 --- a/pdm/cli/options.py +++ b/pdm/cli/options.py @@ -4,9 +4,10 @@ import os from typing import Any, Callable, Sequence -from click import secho +from pdm.compat import Protocol +from pdm.termui import UI -from pdm._types import Protocol +ui = UI() class ActionCallback(Protocol): @@ -90,12 +91,47 @@ def deprecated(message: str, type_: type = str) -> Callable[[Any], Any]: """Prints deprecation message for the argument""" def wrapped_type(obj: Any) -> Any: - secho(f"DEPRECATED: {message}", fg="red", err=True) + ui.echo(f"DEPRECATED: {message}", style="red", err=True) return type_(obj) return wrapped_type +def split_lists(separator: str) -> type[argparse.Action]: + """ + Works the same as `append` except each argument + is considered a `separator`-separated list. + """ + + class SplitList(argparse.Action): + def __call__( + self, + parser: argparse.ArgumentParser, + args: argparse.Namespace, + values: Any, + option_string: str | None = None, + ) -> None: + if not isinstance(values, str): + return + splitted = getattr(args, self.dest) or [] + splitted.extend( + value.strip() for value in values.split(separator) if value.strip() + ) + setattr(args, self.dest, splitted) + + return SplitList + + +def from_splitted_env(name: str, separator: str) -> list[str] | None: + """ + Parse a `separator`-separated list from a `name` environment variable if present. + """ + value = os.getenv(name) + if not value: + return None + return [v.strip() for v in value.split(separator) if v.strip()] or None + + verbose_option = Option( "-v", "--verbose", @@ -116,8 +152,7 @@ def wrapped_type(obj: Any) -> Any: "-L", "--lockfile", default=os.getenv("PDM_LOCKFILE"), - help="Specify another lockfile path, or use `PDM_LOCKFILE` env variable. " - "Default: pdm.lock", + help="Specify another lockfile path. Default: pdm.lock. [env var: PDM_LOCKFILE]", ) pep582_option = Option( @@ -230,6 +265,18 @@ def no_isolation_callback( help="Save minimum version specifiers", ) +skip_option = Option( + "-k", + "--skip", + dest="skip", + action=split_lists(","), + help="Skip some tasks and/or hooks by their comma-separated names." + " Can be supplied multiple times." + ' Use ":all" to skip all hooks.' 
+ ' Use ":pre" and ":post" to skip all pre or post hooks.', + default=from_splitted_env("PDM_SKIP_HOOKS", ","), +) + update_strategy_group = ArgumentGroup("update_strategy", is_mutually_exclusive=True) update_strategy_group.add_argument( "--update-reuse", diff --git a/pdm/cli/utils.py b/pdm/cli/utils.py index f0c12ce9bf..91e4f71c15 100644 --- a/pdm/cli/utils.py +++ b/pdm/cli/utils.py @@ -6,6 +6,7 @@ import sys from argparse import Action, _ArgumentGroup from collections import ChainMap, OrderedDict +from concurrent.futures import ThreadPoolExecutor from json import dumps from pathlib import Path from typing import ( @@ -23,13 +24,13 @@ from packaging.specifiers import SpecifierSet from packaging.version import parse as parse_version from resolvelib.structs import DirectedGraph +from rich.tree import Tree from pdm import termui -from pdm._types import Distribution from pdm.exceptions import PdmUsageError, ProjectError from pdm.formats import FORMATS from pdm.formats.base import make_array, make_inline_table -from pdm.models.pip_shims import url_to_path +from pdm.models.repositories import BaseRepository from pdm.models.requirements import ( Requirement, filter_requirements_with_extras, @@ -39,18 +40,19 @@ from pdm.models.specifiers import get_specifier from pdm.models.working_set import WorkingSet from pdm.project import Project -from pdm.utils import is_path_relative_to +from pdm.utils import is_path_relative_to, url_to_path if TYPE_CHECKING: from resolvelib.resolvers import RequirementInformation, ResolutionImpossible + from pdm.compat import Distribution from pdm.models.candidates import Candidate -class PdmFormatter(argparse.HelpFormatter): +class PdmFormatter(argparse.RawDescriptionHelpFormatter): def start_section(self, heading: str | None) -> None: return super().start_section( - termui.yellow(heading.title() if heading else heading, bold=True) + termui.style(heading.title() if heading else "", style="bold yellow") ) def _format_usage( @@ -64,7 +66,7 @@ def _format_usage( prefix = "Usage: " result = super()._format_usage(usage, actions, groups, prefix) if prefix: - return result.replace(prefix, termui.yellow(prefix, bold=True)) + return result.replace(prefix, termui.style(prefix, style="bold yellow")) return result def _format_action(self, action: Action) -> str: @@ -92,7 +94,7 @@ def _format_action(self, action: Action) -> str: indent_first = help_position # collect the pieces of the action help - parts = [termui.cyan(action_header)] + parts = [termui.style(action_header, style="cyan")] # if there was help for the action, add lines of help text if action.help: @@ -191,81 +193,65 @@ def add_package(key: str, dist: Distribution | None) -> Package: return graph -LAST_CHILD = "└── " -LAST_PREFIX = " " -NON_LAST_CHILD = "├── " -NON_LAST_PREFIX = "│ " - - def specifier_from_requirement(requirement: Requirement) -> str: return str(requirement.specifier or "Any") -def format_package( +def add_package_to_tree( + root: Tree, graph: DirectedGraph, package: Package, required: str = "", - prefix: str = "", visited: frozenset[str] = frozenset(), -) -> str: +) -> None: """Format one package. 
:param graph: the dependency graph :param package: the package instance :param required: the version required by its parent - :param prefix: prefix text for children :param visited: the visited package collection """ - result = [] version = ( - termui.red("[ not installed ]") + "[red][ not installed ][/]" if not package.version - else termui.red(package.version) + else f"[red]{package.version}[/]" if required and required not in ("Any", "This project") and not SpecifierSet(required).contains(package.version) - else termui.yellow(package.version) + else f"[yellow]{package.version}[/]" ) + # escape deps with extras + name = package.name.replace("[", r"\[") if "[" in package.name else package.name if package.name in visited: - version = termui.red("[circular]") + version = r"[red]\[circular][/]" required = f"[ required: {required} ]" if required else "[ Not required ]" - result.append(f"{termui.green(package.name, bold=True)} {version} {required}\n") + node = root.add(f"[bold green]{name}[/] {version} {required}") if package.name in visited: - return "".join(result) + return children = sorted(graph.iter_children(package), key=lambda p: p.name) - for i, child in enumerate(children): - is_last = i == len(children) - 1 - head = LAST_CHILD if is_last else NON_LAST_CHILD - cur_prefix = LAST_PREFIX if is_last else NON_LAST_PREFIX + for child in children: required = specifier_from_requirement(package.requirements[child.name]) - result.append( - prefix - + head - + format_package( - graph, child, required, prefix + cur_prefix, visited | {package.name} - ) - ) - return "".join(result) + add_package_to_tree(node, graph, child, required, visited | {package.name}) -def format_reverse_package( +def add_package_to_reverse_tree( + root: Tree, graph: DirectedGraph, package: Package, child: Package | None = None, requires: str = "", - prefix: str = "", visited: frozenset[str] = frozenset(), -) -> str: +) -> None: """Format one package for output reverse dependency graph.""" version = ( - termui.red("[ not installed ]") + "[red][ not installed ][/]" if not package.version - else termui.yellow(package.version) + else f"[yellow]{package.version}[/]" ) if package.name in visited: - version = termui.red("[circular]") + version = r"[red]\[circular][/]" requires = ( - f"[ requires: {termui.red(requires)} ]" + f"[ requires: [red]{requires}[/] ]" if requires not in ("Any", "") and child and child.version @@ -274,30 +260,20 @@ def format_reverse_package( if not requires else f"[ requires: {requires} ]" ) - result = [f"{termui.green(package.name, bold=True)} {version} {requires}\n"] + name = package.name.replace("[", r"\[") if "[" in package.name else package.name + node = root.add(f"[bold green]{name}[/] {version} {requires}") + if package.name in visited: - return "".join(result) + return parents: list[Package] = sorted( filter(None, graph.iter_parents(package)), key=lambda p: p.name ) - for i, parent in enumerate(parents): - is_last = i == len(parents) - 1 - head = LAST_CHILD if is_last else NON_LAST_CHILD - cur_prefix = LAST_PREFIX if is_last else NON_LAST_PREFIX + for parent in parents: requires = specifier_from_requirement(parent.requirements[package.name]) - result.append( - prefix - + head - + format_reverse_package( - graph, - parent, - package, - requires, - prefix + cur_prefix, - visited | {package.name}, - ) + add_package_to_reverse_tree( + node, graph, parent, package, requires, visited=visited | {package.name} ) - return "".join(result) + return def package_is_project(package: Package, project: Project) -> 
bool: @@ -308,9 +284,9 @@ def package_is_project(package: Package, project: Project) -> bool: ) -def _format_forward_dependency_graph(project: Project, graph: DirectedGraph) -> str: +def _format_forward_dependency_graph(project: Project, graph: DirectedGraph) -> Tree: """Format dependency graph for output.""" - content = [] + root = Tree("Dependencies", hide_root=True) all_dependencies = ChainMap(*project.all_dependencies.values()) top_level_dependencies = sorted(graph.iter_children(None), key=lambda p: p.name) for package in top_level_dependencies: @@ -320,22 +296,24 @@ def _format_forward_dependency_graph(project: Project, graph: DirectedGraph) -> required = "This project" else: required = "" - content.append(format_package(graph, package, required, "")) - return "".join(content).strip() + add_package_to_tree(root, graph, package, required) + return root def _format_reverse_dependency_graph( project: Project, graph: DirectedGraph[Package | None] -) -> str: +) -> Tree: """Format reverse dependency graph for output.""" + root = Tree("Dependencies", hide_root=True) leaf_nodes = sorted( (node for node in graph if not list(graph.iter_children(node)) and node), key=lambda p: p.name, ) - content = [ - format_reverse_package(graph, node, prefix="") for node in leaf_nodes if node - ] - return "".join(content).strip() + for package in leaf_nodes: + if not package: + continue + add_package_to_reverse_tree(root, graph, package) + return root def build_forward_dependency_json_subtree( @@ -422,22 +400,27 @@ def build_dependency_json_tree( ] -def format_dependency_graph( +def show_dependency_graph( project: Project, graph: DirectedGraph[Package | None], reverse: bool = False, json: bool = False, -) -> str: +) -> None: + echo = project.core.ui.echo if json: - return dumps( - build_dependency_json_tree(project, graph, reverse), - indent=2, + echo( + dumps( + build_dependency_json_tree(project, graph, reverse), + indent=2, + ) ) + return if reverse: - return _format_reverse_dependency_graph(project, graph) + tree = _format_reverse_dependency_graph(project, graph) else: - return _format_forward_dependency_graph(project, graph) + tree = _format_forward_dependency_graph(project, graph) + echo(tree) def format_lockfile( @@ -463,7 +446,7 @@ def format_lockfile( if key in file_hashes: continue array = tomlkit.array().multiline(True) - for filename, hash_value in v.hashes.items(): + for filename, hash_value in sorted(v.hashes.items()): inline = make_inline_table({"file": filename, "hash": hash_value}) array.append(inline) # type: ignore if array: @@ -508,7 +491,7 @@ def check_project_file(project: Project) -> None: if not project.meta: raise ProjectError( "The pyproject.toml has not been initialized yet. You can do this " - "by running {}.".format(termui.green("'pdm init'")) + "by running [green]`pdm init`[/]." ) @@ -519,6 +502,7 @@ def find_importable_files(project: Project) -> Iterable[tuple[str, Path]]: "pyproject.toml", "requirements.in", "requirements.txt", + "setup.py", ): project_file = project.root / filename if not project_file.exists(): @@ -568,7 +552,7 @@ def format_resolution_impossible(err: ResolutionImpossible) -> str: result = [ "Unable to find a resolution because the following dependencies don't work " "on all Python versions defined by the project's `requires-python`: " - f"{termui.green(str(project_requires.specifier))}" + f"[green]{str(project_requires.specifier)}[/]." 
] for req, parent in conflicting: info_lines.add(f" {req.as_line()} (from {repr(parent)})") @@ -582,14 +566,14 @@ def format_resolution_impossible(err: ResolutionImpossible) -> str: if len(causes) == 1: return ( "Unable to find a resolution for " - f"{termui.green(causes[0].requirement.identify())}\n" + f"[green]{causes[0].requirement.identify()}[/]\n" "Please make sure the package name is correct." ) result = [ - f"Unable to find a resolution for " - f"{termui.green(causes[0].requirement.identify())} because of the following " - "conflicts:" + "Unable to find a resolution for " + f"[green]{causes[0].requirement.identify()}[/]\n" + "because of the following conflicts:" ] for req, parent in causes: info_lines.add( @@ -630,7 +614,8 @@ def translate_groups( invalid_groups = groups_set - set(project.iter_groups()) if invalid_groups: project.core.ui.echo( - f"Ignoring non-existing groups: {invalid_groups}", fg="yellow", err=True + f"[d]Ignoring non-existing groups: [green]{', '.join(invalid_groups)}[/]", + err=True, ) groups_set -= invalid_groups return sorted(groups_set) @@ -653,6 +638,16 @@ def merge_dictionary( target[key] = value +def fetch_hashes(repository: BaseRepository, mapping: Mapping[str, Candidate]) -> None: + """Fetch hashes for candidates in parallel""" + + def do_fetch(candidate: Candidate) -> None: + candidate.hashes = repository.get_hashes(candidate) + + with ThreadPoolExecutor() as executor: + executor.map(do_fetch, mapping.values()) + + def is_pipx_installation() -> bool: return sys.prefix.split(os.sep)[-3:-1] == ["pipx", "venvs"] diff --git a/pdm/compat.py b/pdm/compat.py new file mode 100644 index 0000000000..b4ff91e475 --- /dev/null +++ b/pdm/compat.py @@ -0,0 +1,26 @@ +import sys + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + + +if sys.version_info >= (3, 8): + import importlib.metadata as importlib_metadata + from typing import Literal, Protocol, TypedDict +else: + import importlib_metadata + from typing_extensions import Literal, Protocol, TypedDict + +Distribution = importlib_metadata.Distribution + + +__all__ = [ + "tomllib", + "importlib_metadata", + "Literal", + "Protocol", + "TypedDict", + "Distribution", +] diff --git a/pdm/core.py b/pdm/core.py index 5f2e85d895..5478c1ba32 100644 --- a/pdm/core.py +++ b/pdm/core.py @@ -1,3 +1,10 @@ +r""" + ____ ____ __ ___ + / __ \/ __ \/ |/ / + / /_/ / / / / /|_/ / + / ____/ /_/ / / / / +/_/ /_____/_/ /_/ +""" from __future__ import annotations import argparse @@ -9,7 +16,6 @@ from pathlib import Path from typing import Any, List, Optional, Type, cast -import click from resolvelib import Resolver from pdm import termui @@ -18,17 +24,13 @@ from pdm.cli.commands.base import BaseCommand from pdm.cli.options import ignore_python_option, pep582_option, verbose_option from pdm.cli.utils import PdmFormatter +from pdm.compat import importlib_metadata from pdm.exceptions import PdmUsageError from pdm.installers import InstallManager, Synchronizer from pdm.models.repositories import PyPIRepository from pdm.project import Project from pdm.project.config import Config, ConfigItem -if sys.version_info >= (3, 8): - import importlib.metadata as importlib_metadata -else: - import importlib_metadata - COMMANDS_MODULE_PATH: str = importlib.import_module( "pdm.cli.commands" ).__path__ # type: ignore @@ -55,7 +57,7 @@ def __init__(self) -> None: def init_parser(self) -> None: self.parser = argparse.ArgumentParser( prog="pdm", - description="PDM - Python Development Master", + description=__doc__, 
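The new `pdm/compat.py` module above centralizes these version shims, so call sites (as in the `pdm/core.py` and `pdm/formats/*` hunks) import them instead of branching on `sys.version_info` themselves. A minimal consumer-side sketch:

```python
# Call sites import the shims instead of gating on the interpreter version;
# the pyproject.toml path and the "pdm" distribution name are illustrative.
from pdm.compat import importlib_metadata, tomllib

with open("pyproject.toml", "rb") as f:
    data = tomllib.load(f)  # stdlib tomllib on 3.11+, the tomli backport below
print(importlib_metadata.version("pdm"))  # importlib.metadata on 3.8+
```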
formatter_class=PdmFormatter, ) self.parser.is_root = True # type: ignore @@ -64,7 +66,8 @@ def init_parser(self) -> None: "--version", action="version", version="{}, version {}".format( - click.style("Python Development Master (PDM)", bold=True), self.version + termui.style("PDM", style="bold"), + self.version, ), help="show the version and exit", ) @@ -138,9 +141,24 @@ def main( **extra: Any, ) -> None: """The main entry function""" - from pdm.models.pip_shims import global_tempdir_manager + # Ensure same behavior while testing and using the CLI + args = args or sys.argv[1:] + # Keep it for after project parsing to check if it's a defined script + root_script = None + try: + options = self.parser.parse_args(args) + except SystemExit as e: + # Failed to parse, try to give all to `run` command as shortcut + # and keep the root script (first non-dashed param) to check its existence + # as soon as the project is parsed + root_script = next((arg for arg in args if not arg.startswith("-")), None) + if not root_script: + raise + try: + options = self.parser.parse_args(["run", *args]) + except SystemExit: + raise e - options = self.parser.parse_args(args or None) self.ui.set_verbosity(options.verbose) if options.ignore_python: os.environ["PDM_IGNORE_SAVED_PYTHON"] = "1" @@ -151,27 +169,31 @@ def main( self.ensure_project(options, obj) + if root_script and root_script not in options.project.scripts: + self.parser.error(f"Command unknown: {root_script}") + try: f = options.handler except AttributeError: - self.parser.print_help() + self.parser.print_help(sys.stderr) sys.exit(1) else: try: - with global_tempdir_manager(): - f(options.project, options) + f(options.project, options) except Exception: etype, err, traceback = sys.exc_info() should_show_tb = not isinstance(err, PdmUsageError) - if self.ui.verbosity > termui.NORMAL and should_show_tb: + if self.ui.verbosity > termui.Verbosity.NORMAL and should_show_tb: raise cast(Exception, err).with_traceback(traceback) self.ui.echo( - f"{termui.red('[' + etype.__name__ + ']')}: {err}", # type: ignore + rf"[red]\[{etype.__name__}][/]: {err}", # type: ignore err=True, ) if should_show_tb: self.ui.echo( - "Add '-v' to see the detailed traceback", fg="yellow", err=True + "Add '-v' to see the detailed traceback", + style="yellow", + err=True, ) sys.exit(1) else: @@ -222,7 +244,7 @@ def my_plugin(core: pdm.core.Core) -> None: except Exception as e: self.ui.echo( f"Failed to load plugin {plugin.name}={plugin.value}: {e}", - fg="red", + style="red", err=True, ) diff --git a/pdm/exceptions.py b/pdm/exceptions.py index 6fd28fb809..72b003499a 100644 --- a/pdm/exceptions.py +++ b/pdm/exceptions.py @@ -2,8 +2,6 @@ from typing import TYPE_CHECKING, List -from pdm import termui - if TYPE_CHECKING: from pdm.models.candidates import Candidate @@ -16,15 +14,15 @@ class PdmUsageError(PdmException): pass -class RequirementError(PdmException, ValueError): +class RequirementError(PdmUsageError, ValueError): pass -class InvalidPyVersion(PdmException, ValueError): +class PublishError(PdmUsageError): pass -class CorruptedCacheError(PdmException): +class InvalidPyVersion(PdmUsageError, ValueError): pass @@ -35,8 +33,7 @@ class CandidateNotFound(PdmException): class CandidateInfoNotFound(PdmException): def __init__(self, candidate: Candidate) -> None: message = ( - "No metadata information is available for " - f"{termui.green(str(candidate))}." + "No metadata information is available for " f"[green]{str(candidate)}[/]."
) self.candidate = candidate super().__init__(message) @@ -60,12 +57,12 @@ class UninstallError(PdmException): pass -class NoConfigError(PdmException, KeyError): +class NoConfigError(PdmUsageError, KeyError): def __init__(self, key: str) -> None: super().__init__("No such config item: {}".format(key)) -class NoPythonVersion(PdmException): +class NoPythonVersion(PdmUsageError): pass diff --git a/pdm/formats/__init__.py b/pdm/formats/__init__.py index 1f35b1dc47..8556ed6599 100644 --- a/pdm/formats/__init__.py +++ b/pdm/formats/__init__.py @@ -4,8 +4,8 @@ from pathlib import Path from typing import Iterable, Mapping, Union, cast -from pdm._types import Protocol -from pdm.formats import flit, legacy, pipfile, poetry, requirements, setup_py +from pdm.compat import Protocol +from pdm.formats import flit, pipfile, poetry, requirements, setup_py from pdm.models.candidates import Candidate from pdm.models.requirements import Requirement from pdm.project import Project @@ -36,6 +36,5 @@ def export( "poetry": cast(_Format, poetry), "flit": cast(_Format, flit), "requirements": cast(_Format, requirements), - "legacy": cast(_Format, legacy), "setuppy": cast(_Format, setup_py), } diff --git a/pdm/formats/flit.py b/pdm/formats/flit.py index 5162487646..dd0d51558d 100644 --- a/pdm/formats/flit.py +++ b/pdm/formats/flit.py @@ -5,8 +5,7 @@ from pathlib import Path from typing import Any, Dict, List, Mapping, Optional, Tuple, cast -import tomli - +from pdm.compat import tomllib from pdm.formats.base import ( MetaConverter, Unset, @@ -22,8 +21,8 @@ def check_fingerprint(project: Optional[Project], filename: PathLike) -> bool: with open(filename, "rb") as fp: try: - data = tomli.load(fp) - except tomli.TOMLDecodeError: + data = tomllib.load(fp) + except tomllib.TOMLDecodeError: return False return "tool" in data and "flit" in data["tool"] @@ -82,7 +81,7 @@ def warn_against_dynamic_version_or_docstring( "They are probably imported from other files which is not supported by PDM." " You may need to supply their values in pyproject.toml manually." 
) - self._ui.echo(message, err=True, fg="yellow") + self._ui.echo(message, err=True, style="yellow") @convert_from("metadata") def name(self, metadata: Dict[str, Any]) -> str: @@ -135,8 +134,9 @@ def entry_points( @convert_from("sdist") def includes(self, value: Dict[str, List[str]]) -> None: - self.settings["excludes"] = value.get("exclude") - self.settings["includes"] = value.get("include") + self.settings.setdefault("build", {}).update( + {"excludes": value.get("exclude"), "includes": value.get("include")} + ) raise Unset() @@ -145,7 +145,7 @@ def convert( ) -> Tuple[Mapping, Mapping]: with open(filename, "rb") as fp, cd(os.path.dirname(os.path.abspath(filename))): converter = FlitMetaConverter( - tomli.load(fp)["tool"]["flit"], project.core.ui if project else None + tomllib.load(fp)["tool"]["flit"], project.core.ui if project else None ) return converter.convert() diff --git a/pdm/formats/legacy.py b/pdm/formats/legacy.py deleted file mode 100644 index 12af3c8241..0000000000 --- a/pdm/formats/legacy.py +++ /dev/null @@ -1,176 +0,0 @@ -import functools -from argparse import Namespace -from os import PathLike -from pathlib import Path -from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union, cast - -import tomli - -from pdm._types import RequirementDict, Source -from pdm.formats.base import ( - MetaConverter, - Unset, - convert_from, - make_array, - make_inline_table, - parse_name_email, -) -from pdm.models.requirements import Requirement -from pdm.project.core import Project - - -def check_fingerprint(project: Project, filename: PathLike) -> bool: - with open(filename, "rb") as fp: - try: - data = tomli.load(fp) - except tomli.TOMLDecodeError: - return False - - return ( - "tool" in data - and "pdm" in data["tool"] - and "dependencies" in data["tool"]["pdm"] - ) - - -class LegacyMetaConverter(MetaConverter): - @convert_from("author") - def authors(self, value: str) -> List[str]: - return cast(List[str], parse_name_email([value])) - - @convert_from("maintainer") - def maintainers(self, value: str) -> List[str]: - return cast(List[str], parse_name_email([value])) - - @convert_from("version") - def version( - self, value: Union[Dict[str, str], List[str], str] - ) -> Union[Dict[str, str], List[str], str]: - if not isinstance(value, str): - self._data.setdefault("dynamic", []).append("version") - return value - - @convert_from("python_requires", name="requires-python") - def requires_python(self, value: str) -> str: - if "classifiers" not in self._data.setdefault("dynamic", []): - self._data["dynamic"].append("classifiers") - return value - - @convert_from("license") - def license(self, value: str) -> Dict[str, str]: - return make_inline_table({"text": value}) - - @convert_from("source") - def sources(self, value: List[Source]) -> None: - self.settings["source"] = value - raise Unset() - - @convert_from("homepage") - def homepage(self, value: str) -> None: - self._data.setdefault("urls", {})["homepage"] = value - raise Unset() - - @convert_from("project_urls") - def urls(self, value: str) -> None: - self._data.setdefault("urls", {}).update(value) - raise Unset() - - @convert_from("dependencies") - def dependencies(self, value: Dict[str, str]) -> List[str]: - return cast( - List[str], - make_array( - [ - Requirement.from_req_dict(name, req).as_line() - for name, req in value.items() - ], - True, - ), - ) - - @convert_from("dev-dependencies") - def dev_dependencies(self, value: Dict[str, RequirementDict]) -> None: - self.settings["dev-dependencies"] = { - "dev": make_array( - 
[ - Requirement.from_req_dict(name, req).as_line() - for name, req in value.items() - ], - True, - ) - } - raise Unset() - - @convert_from(name="optional-dependencies") - def optional_dependencies(self, source: Dict[str, Any]) -> Dict[str, List[str]]: - extras = {} - for key, reqs in list(source.items()): - if key.endswith("-dependencies") and key != "dev-dependencies": - reqs = cast(Dict[str, RequirementDict], reqs) - extra_key = key.split("-", 1)[0] - extras[extra_key] = [ - Requirement.from_req_dict(name, req).as_line() - for name, req in reqs.items() - ] - source.pop(key) - for name in cast(List[str], source.pop("extras", [])): - if name in extras: - continue - if "=" in name: - key, others = name.split("=", 1) - parts = others.split("|") - extras[key] = list( - functools.reduce( - lambda x, y: cast(Set[str], x).union(extras[y]), - parts, - cast(Set[str], set()), - ) - ) - return extras - - @convert_from("cli") - def scripts(self, value: Dict[str, str]) -> Dict[str, str]: - return dict(value) - - @convert_from("includes") - def includes(self, value: List[str]) -> None: - self.settings["includes"] = value - raise Unset() - - @convert_from("excludes") - def excludes(self, value: List[str]) -> None: - self.settings["excludes"] = value - raise Unset() - - @convert_from("build") - def build(self, value: str) -> None: - self.settings["build"] = value - raise Unset() - - @convert_from("entry_points", name="entry-points") - def entry_points( - self, value: Dict[str, Dict[str, str]] - ) -> Dict[str, Dict[str, str]]: - return dict(value) - - @convert_from("scripts") - def run_scripts(self, value: str) -> None: - self.settings["scripts"] = value - raise Unset() - - @convert_from("allow_prereleases") - def allow_prereleases(self, value: bool) -> None: - self.settings["allow_prereleases"] = value - raise Unset() - - -def convert( - project: Project, filename: Path, options: Optional[Namespace] -) -> Tuple[Mapping[str, Any], Mapping[str, Any]]: - with open(filename, "rb") as fp: - converter = LegacyMetaConverter(tomli.load(fp)["tool"]["pdm"], project.core.ui) - return converter.convert() - - -def export(project: Project, candidates: List, options: Optional[Any]) -> None: - raise NotImplementedError() diff --git a/pdm/formats/pipfile.py b/pdm/formats/pipfile.py index 393eeb6cbe..b7155b7259 100644 --- a/pdm/formats/pipfile.py +++ b/pdm/formats/pipfile.py @@ -7,10 +7,10 @@ from os import PathLike from typing import Any -import tomli from packaging.markers import default_environment from pdm._types import RequirementDict +from pdm.compat import tomllib from pdm.formats.base import make_array from pdm.models.markers import Marker from pdm.models.requirements import Requirement @@ -45,7 +45,7 @@ def convert( project: Project, filename: PathLike, options: Namespace | None ) -> tuple[dict[str, Any], dict[str, Any]]: with open(filename, "rb") as fp: - data = tomli.load(fp) + data = tomllib.load(fp) result = {} settings = {} if "pipenv" in data: diff --git a/pdm/formats/poetry.py b/pdm/formats/poetry.py index 09f3a14eaf..e09fd680fa 100644 --- a/pdm/formats/poetry.py +++ b/pdm/formats/poetry.py @@ -6,11 +6,10 @@ import re from argparse import Namespace from pathlib import Path -from typing import TYPE_CHECKING, Any, Mapping - -import tomli +from typing import TYPE_CHECKING, Any, Mapping, cast from pdm._types import RequirementDict, Source +from pdm.compat import tomllib from pdm.formats.base import ( MetaConverter, Unset, @@ -32,8 +31,8 @@ def check_fingerprint(project: Project | None, filename: Path | str) -> 
bool: with open(filename, "rb") as fp: try: - data = tomli.load(fp) - except tomli.TOMLDecodeError: + data = tomllib.load(fp) + except tomllib.TOMLDecodeError: return False return "tool" in data and "poetry" in data["tool"] @@ -166,20 +165,24 @@ def includes(self, source: dict[str, list[str] | str]) -> list[str]: include = f"{item.get('from')}/{include}" result.append(include) result.extend(source.pop("include", [])) - self.settings["includes"] = result + self.settings.setdefault("build", {})["includes"] = result raise Unset() @convert_from("exclude") def excludes(self, value: list[str]) -> None: - self.settings["excludes"] = value + self.settings.setdefault("build", {})["excludes"] = value raise Unset() @convert_from("build") def build(self, value: str | dict) -> None: - if isinstance(value, str): - self.settings["build"] = value - elif "script" in value: - self.settings["build"] = value.get("script") + run_setuptools = True + if isinstance(value, dict): + if "generate-setup-file" in value: + run_setuptools = cast(bool, value["generate-setup-file"]) + value = value["script"] + self.settings.setdefault("build", {}).update( + {"setup-script": value, "run-setuptools": run_setuptools} + ) raise Unset() @convert_from("source") @@ -202,7 +205,7 @@ def convert( ) -> tuple[Mapping[str, Any], Mapping[str, Any]]: with open(filename, "rb") as fp, cd(os.path.dirname(os.path.abspath(filename))): converter = PoetryMetaConverter( - tomli.load(fp)["tool"]["poetry"], project.core.ui if project else None + tomllib.load(fp)["tool"]["poetry"], project.core.ui if project else None ) return converter.convert() diff --git a/pdm/formats/requirements.py b/pdm/formats/requirements.py index 2fc9074722..77ae282666 100644 --- a/pdm/formats/requirements.py +++ b/pdm/formats/requirements.py @@ -1,87 +1,97 @@ from __future__ import annotations +import argparse import dataclasses import hashlib +import shlex import urllib.parse from argparse import Namespace from os import PathLike -from typing import Any, Mapping +from typing import TYPE_CHECKING, Any, Mapping, cast from pdm.formats.base import make_array from pdm.models.candidates import Candidate -from pdm.models.environment import Environment -from pdm.models.pip_shims import InstallRequirement, PackageFinder, parse_requirements from pdm.models.requirements import Requirement, parse_requirement from pdm.project import Project -from pdm.utils import expand_env_vars_in_auth, get_finder +from pdm.utils import expand_env_vars_in_auth +if TYPE_CHECKING: + from pdm._types import Source -def _requirement_to_str_lowercase_name(requirement: InstallRequirement) -> str: - """Formats a packaging.requirements.Requirement with a lowercase name.""" - assert requirement.name - parts = [requirement.name.lower()] - if requirement.extras: - parts.append("[{0}]".format(",".join(sorted(requirement.extras)))) - - if requirement.specifier: - parts.append(str(requirement.specifier)) - - if requirement.link: - parts.append("@ {0}".format(requirement.link.url_without_fragment)) - if requirement.link.subdirectory_fragment: - parts.append( - "#subdirectory={0}".format(requirement.link.subdirectory_fragment) - ) - - return "".join(parts) - - -def ireq_as_line(ireq: InstallRequirement, environment: Environment) -> str: - """Formats an `InstallRequirement` instance as a - PEP 508 dependency string. - - Generic formatter for pretty printing InstallRequirements to the terminal - in a less verbose way than using its `__str__` method. 
- - :param :class:`InstallRequirement` ireq: A pip **InstallRequirement** instance. - :return: A formatted string for prettyprinting - :rtype: str +class RequirementParser: + """Reference: + https://pip.pypa.io/en/stable/reference/requirements-file-format/ """ - if ireq.editable: - line = "-e {}".format(ireq.link) - else: - if not ireq.req: - req = parse_requirement("dummy @" + ireq.link.url) # type: ignore - req.name = Candidate(req).prepare(environment).metadata.metadata["Name"] - ireq.req = req # type: ignore - - line = _requirement_to_str_lowercase_name(ireq) - if ireq.markers: - line = f"{line}; {ireq.markers}" - - return line - -def parse_requirement_file( - filename: str, -) -> tuple[list[InstallRequirement], PackageFinder]: - from pdm.models.pip_shims import install_req_from_parsed_requirement - - finder = get_finder([]) - ireqs = [ - install_req_from_parsed_requirement(pr) - for pr in parse_requirements(filename, finder.session, finder) # type: ignore - ] - return ireqs, finder + # TODO: support no_binary, only_binary, prefer_binary, pre and no_index + + def __init__(self) -> None: + self.requirements: list[Requirement] = [] + self.index_url: str | None = None + self.extra_index_urls: list[str] = [] + self.no_index: bool = False + self.find_links: list[str] = [] + self.trusted_hosts: list[str] = [] + parser = argparse.ArgumentParser() + parser.add_argument("--index-url", "-i") + parser.add_argument("--no-index", action="store_true") + parser.add_argument("--extra-index-url") + parser.add_argument("--find-links", "-f") + parser.add_argument("--trusted-host") + parser.add_argument("-e", "--editable", nargs="+") + parser.add_argument("-r", "--requirement") + self._parser = parser + + def _clean_line(self, line: str) -> str: + """Strip the surrounding whitespaces and comment from the line""" + line = line.strip() + if line.startswith("#"): + return "" + return line.split(" #", 1)[0].strip() + + def _parse_line(self, line: str) -> None: + if not line.startswith("-"): + # Starts with a requirement, just ignore all per-requirement options + req_string = line.split(" -", 1)[0].strip() + self.requirements.append(parse_requirement(req_string)) + return + args, _ = self._parser.parse_known_args(shlex.split(line)) + if args.index_url: + self.index_url = args.index_url + if args.no_index: + self.no_index = args.no_index + if args.extra_index_url: + self.extra_index_urls.append(args.extra_index_url) + if args.find_links: + self.find_links.append(args.find_links) + if args.trusted_host: + self.trusted_hosts.append(args.trusted_host) + if args.editable: + self.requirements.append(parse_requirement(" ".join(args.editable), True)) + if args.requirement: + self.parse(args.requirement) + + def parse(self, filename: str) -> None: + with open(filename, encoding="utf-8") as f: + this_line = "" + for line in filter(None, map(self._clean_line, f)): + if line.endswith("\\"): + this_line += line[:-1].rstrip() + " " + continue + this_line += line + self._parse_line(this_line) + this_line = "" + if this_line: + self._parse_line(this_line) def check_fingerprint(project: Project, filename: PathLike) -> bool: - import tomli + from pdm.compat import tomllib with open(filename, "rb") as fp: try: - tomli.load(fp) + tomllib.load(fp) except ValueError: # the file should be a requirements.txt if it not a TOML document. 
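Taken together, the new `RequirementParser` replaces the pip-internal `parse_requirements` call with a self-contained reader. A minimal driving sketch, assuming this patched module and a `requirements.txt` in the working directory:

```python
# Drive the new parser the same way convert() below does; the file path
# is a placeholder for illustration.
from pdm.formats.requirements import RequirementParser

parser = RequirementParser()
parser.parse("requirements.txt")
print([r.as_line() for r in parser.requirements])  # PEP 508 lines
print(parser.index_url, parser.extra_index_urls)   # -i / --extra-index-url
print(parser.trusted_hosts, parser.find_links)     # --trusted-host / -f
```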
return True @@ -89,20 +99,38 @@ def check_fingerprint(project: Project, filename: PathLike) -> bool: return False -def convert_url_to_source(url: str, name: str | None = None) -> dict[str, Any]: +def _is_url_trusted(url: str, trusted_hosts: list[str]) -> bool: + parsed = urllib.parse.urlparse(url) + netloc, host = parsed.netloc, parsed.hostname + + for trusted in trusted_hosts: + if trusted in (host, netloc): + return True + return False + + +def convert_url_to_source( + url: str, name: str | None, trusted_hosts: list[str], type: str = "index" +) -> Source: if not name: name = hashlib.sha1(url.encode("utf-8")).hexdigest()[:6] - return {"name": name, "url": url, "verify_ssl": url.startswith("https://")} + source = { + "name": name, + "url": url, + "verify_ssl": not _is_url_trusted(url, trusted_hosts), + } + if type != "index": + source["type"] = type + return cast("Source", source) def convert( project: Project, filename: PathLike, options: Namespace ) -> tuple[Mapping[str, Any], Mapping[str, Any]]: - ireqs, finder = parse_requirement_file(str(filename)) - with project.core.ui.logging("build"): - reqs = [ireq_as_line(ireq, project.environment) for ireq in ireqs] + parser = RequirementParser() + parser.parse(str(filename)) - deps = make_array(reqs, True) + deps = make_array([r.as_line() for r in parser.requirements], True) data: dict[str, Any] = {} settings: dict[str, Any] = {} if options.dev: @@ -111,11 +139,31 @@ def convert( data["optional-dependencies"] = {options.group: deps} else: data["dependencies"] = deps - if finder.index_urls: - sources = [convert_url_to_source(finder.index_urls[0], "pypi")] - sources.extend(convert_url_to_source(url) for url in finder.index_urls[1:]) - settings["source"] = sources + sources: list[Source] = [] + if parser.index_url and not parser.no_index: + sources.append( + convert_url_to_source(parser.index_url, "pypi", parser.trusted_hosts) + ) + if not parser.no_index: + for url in parser.extra_index_urls: + sources.append(convert_url_to_source(url, None, parser.trusted_hosts)) + if parser.find_links: + first, *find_links = parser.find_links + sources.append( + convert_url_to_source( + first, + "pypi" if parser.no_index else None, + parser.trusted_hosts, + "find_links", + ) + ) + for url in find_links: + sources.append( + convert_url_to_source(url, None, parser.trusted_hosts, "find_links") + ) + if sources: + settings["source"] = sources return data, settings diff --git a/pdm/formats/setup_py.py b/pdm/formats/setup_py.py index cc0adc1b50..068471a809 100644 --- a/pdm/formats/setup_py.py +++ b/pdm/formats/setup_py.py @@ -1,7 +1,8 @@ import os from pathlib import Path -from typing import Any, List, Optional +from typing import Any, Dict, List, Mapping, Optional, Tuple +from pdm.formats.base import array_of_inline_tables, make_array, make_inline_table from pdm.project import Project @@ -9,8 +10,54 @@ def check_fingerprint(project: Project, filename: Path) -> bool: return os.path.basename(filename) == "setup.py" -def convert(project: Project, filename: Path, options: Optional[Any]) -> None: - raise NotImplementedError() +def convert( + project: Project, filename: Path, options: Optional[Any] +) -> Tuple[Mapping[str, Any], Mapping[str, Any]]: + from pdm.models.in_process import parse_setup_py + + parsed = parse_setup_py( + str(project.environment.interpreter.executable), str(filename) + ) + metadata: Dict[str, Any] = {} + settings: Dict[str, Any] = {} + for name in [ + "name", + "version", + "description", + "keywords", + "urls", + "readme", + ]: + if name in 
parsed: + metadata[name] = parsed[name] + if "authors" in parsed: + metadata["authors"] = array_of_inline_tables(parsed["authors"]) + if "maintainers" in parsed: + metadata["maintainers"] = array_of_inline_tables(parsed["maintainers"]) + if "classifiers" in parsed: + metadata["classifiers"] = make_array(sorted(parsed["classifiers"]), True) + if "python_requires" in parsed: + metadata["requires-python"] = parsed["python_requires"] + if "install_requires" in parsed: + metadata["dependencies"] = make_array(sorted(parsed["install_requires"]), True) + if "extras_require" in parsed: + metadata["optional-dependencies"] = { + k: make_array(sorted(v), True) for k, v in parsed["extras_require"].items() + } + if "license" in parsed: + metadata["license"] = make_inline_table({"text": parsed["license"]}) + if "package_dir" in parsed: + settings["package-dir"] = parsed["package_dir"] + + entry_points = parsed.get("entry_points", {}) + if "console_scripts" in entry_points: + metadata["scripts"] = entry_points.pop("console_scripts") + if "gui_scripts" in entry_points: + metadata["gui-scripts"] = entry_points.pop("gui_scripts") + if entry_points: + metadata["entry-points"] = entry_points + + return metadata, settings def export(project: Project, candidates: List, options: Optional[Any]) -> str: diff --git a/pdm/installers/installers.py b/pdm/installers/installers.py index aec46d461b..6803987301 100644 --- a/pdm/installers/installers.py +++ b/pdm/installers/installers.py @@ -203,7 +203,7 @@ def install_wheel_with_cache( and fs_supports_symlink() ) if not cache_path.is_dir(): - logger.debug("Installing wheel into cached location %s", cache_path) + logger.info("Installing wheel into cached location %s", cache_path) cache_path.mkdir(exist_ok=True) destination = InstallDestination( scheme_dict=package_cache.scheme(), diff --git a/pdm/installers/manager.py b/pdm/installers/manager.py index b7afd0abec..0750a95396 100644 --- a/pdm/installers/manager.py +++ b/pdm/installers/manager.py @@ -8,7 +8,7 @@ from pdm.installers.uninstallers import BaseRemovePaths, StashedRemovePaths if TYPE_CHECKING: - from pdm._types import Distribution + from pdm.compat import Distribution from pdm.models.candidates import Candidate from pdm.models.environment import Environment @@ -36,7 +36,7 @@ def install(self, candidate: Candidate) -> None: else: installer = install_wheel prepared = candidate.prepare(self.environment) - installer(prepared.build(), self.environment, prepared.direct_url()) + installer(str(prepared.build()), self.environment, prepared.direct_url()) def get_paths_to_remove(self, dist: Distribution) -> BaseRemovePaths: """Get the path collection to be removed from the disk""" @@ -45,6 +45,8 @@ def get_paths_to_remove(self, dist: Distribution) -> BaseRemovePaths: def uninstall(self, dist: Distribution) -> None: """Perform the uninstallation for a given distribution""" remove_path = self.get_paths_to_remove(dist) + dist_name = dist.metadata["Name"] + termui.logger.info("Removing distribution %s", dist_name) try: remove_path.remove() remove_path.commit() diff --git a/pdm/installers/synchronizers.py b/pdm/installers/synchronizers.py index 6efd07c5d5..544c97275b 100644 --- a/pdm/installers/synchronizers.py +++ b/pdm/installers/synchronizers.py @@ -4,10 +4,11 @@ import multiprocessing import traceback from concurrent.futures import Future, ThreadPoolExecutor -from typing import Any, Callable, Collection, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Collection, TypeVar + +from 
rich.progress import SpinnerColumn from pdm import termui -from pdm._types import Distribution from pdm.exceptions import InstallationError from pdm.installers.manager import InstallManager from pdm.models.candidates import Candidate @@ -15,13 +16,21 @@ from pdm.models.requirements import parse_requirement, strip_extras from pdm.utils import is_editable +if TYPE_CHECKING: + from rich.progress import Progress + + from pdm.compat import Distribution + + +_T = TypeVar("_T") + class DummyFuture: _NOT_SET = object() def __init__(self) -> None: self._result = self._NOT_SET - self._exc: Optional[Exception] = None + self._exc: Exception | None = None def set_result(self, result: Any) -> None: self._result = result @@ -32,10 +41,10 @@ def set_exception(self, exc: Exception) -> None: def result(self) -> Any: return self._result - def exception(self) -> Optional[Exception]: + def exception(self) -> Exception | None: return self._exc - def add_done_callback(self, func: Callable) -> None: + def add_done_callback(self: _T, func: Callable[[_T], Any]) -> None: func(self) @@ -44,7 +53,7 @@ class DummyExecutor: functions are called and awaited for the result """ - def submit(self, func: Callable, *args: str, **kwargs: Any) -> DummyFuture: + def submit(self, func: Callable, *args: Any, **kwargs: Any) -> DummyFuture: future = DummyFuture() try: future.set_result(func(*args, **kwargs)) @@ -52,7 +61,7 @@ def submit(self, func: Callable, *args: str, **kwargs: Any) -> DummyFuture: future.set_exception(exc) return future - def __enter__(self) -> DummyExecutor: + def __enter__(self: _T) -> _T: return self def __exit__(self, *args: Any, **kwargs: Any) -> None: @@ -84,7 +93,7 @@ class Synchronizer: def __init__( self, - candidates: Dict[str, Candidate], + candidates: dict[str, Candidate], environment: Environment, clean: bool = False, dry_run: bool = False, @@ -133,7 +142,7 @@ def __init__( def create_executor( self, - ) -> Union[ThreadPoolExecutor, DummyExecutor]: + ) -> ThreadPoolExecutor | DummyExecutor: if self.parallel: return ThreadPoolExecutor(max_workers=min(multiprocessing.cpu_count(), 8)) else: @@ -151,7 +160,7 @@ def get_manager(self) -> InstallManager: ) @property - def self_key(self) -> Optional[str]: + def self_key(self) -> str | None: name = self.environment.project.name if name: return self.environment.project.meta.project_name.lower() @@ -166,7 +175,7 @@ def _should_update(self, dist: Distribution, can: Candidate) -> bool: else: return dist.version != can.version - def compare_with_working_set(self) -> Tuple[List[str], List[str], List[str]]: + def compare_with_working_set(self) -> tuple[list[str], list[str], list[str]]: """Compares the candidates and return (to_add, to_update, to_remove)""" working_set = self.working_set candidates = self.candidates.copy() @@ -199,85 +208,101 @@ def compare_with_working_set(self) -> Tuple[List[str], List[str], List[str]]: sorted(to_remove) if self.clean else [], ) - def install_candidate(self, key: str) -> Candidate: + def install_candidate(self, key: str, progress: Progress) -> Candidate: """Install candidate""" can = self.candidates[key] - with self.ui.open_spinner(f"Installing {can.format()}...") as spinner: - try: - self.manager.install(can) - except Exception: - spinner.fail(f"Install {can.format()} failed") - raise - else: - spinner.succeed(f"Install {can.format()} successful") - + job = progress.add_task(f"Installing {can.format()}...", total=1) + try: + self.manager.install(can) + except Exception: + progress.live.console.print( + f" 
[red]{termui.Emoji.FAIL}[/] Install {can.format()} failed" + ) + raise + else: + progress.live.console.print( + f" [green]{termui.Emoji.SUCC}[/] Install {can.format()} successful" + ) + finally: + progress.update(job, completed=1, visible=False) return can - def update_candidate(self, key: str) -> Tuple[Distribution, Candidate]: + def update_candidate( + self, key: str, progress: Progress + ) -> tuple[Distribution, Candidate]: """Update candidate""" can = self.candidates[key] dist = self.working_set[strip_extras(key)[0]] dist_version = dist.version - with self.ui.open_spinner( - f"Updating {termui.green(key, bold=True)} {termui.yellow(dist.version)} " - f"-> {termui.yellow(can.version)}..." - ) as spinner: - try: - self.manager.uninstall(dist) - self.manager.install(can) - except Exception: - spinner.fail( - f"Update {termui.green(key, bold=True)} " - f"{termui.yellow(dist_version)} -> " - f"{termui.yellow(can.version)} failed" - ) - raise - else: - spinner.succeed( - f"Update {termui.green(key, bold=True)} " - f"{termui.yellow(dist_version)} -> " - f"{termui.yellow(can.version)} successful" - ) + job = progress.add_task( + f"Updating [bold green]{key}[/] " + f"[yellow]{dist_version}[/] " + f"-> [yellow]{can.version}[/]...", + total=1, + ) + try: + self.manager.uninstall(dist) + self.manager.install(can) + except Exception: + progress.live.console.print( + f" [red]{termui.Emoji.FAIL}[/] Update [bold green]{key}[/] " + f"[yellow]{dist_version}[/] " + f"-> [yellow]{can.version}[/] failed", + ) + raise + else: + progress.live.console.print( + f" [green]{termui.Emoji.SUCC}[/] Update [bold green]{key}[/] " + f"[yellow]{dist_version}[/] " + f"-> [yellow]{can.version}[/] successful", + ) + finally: + progress.update(job, completed=1, visible=False) + return dist, can - def remove_distribution(self, key: str) -> Distribution: + def remove_distribution(self, key: str, progress: Progress) -> Distribution: """Remove distributions with given names.""" dist = self.working_set[key] dist_version = dist.version - with self.ui.open_spinner( - f"Removing {termui.green(key, bold=True)} {termui.yellow(dist.version)}..." 
- ) as spinner: - try: - self.manager.uninstall(dist) - except Exception: - spinner.fail( - f"Remove {termui.green(key, bold=True)} " - f"{termui.yellow(dist_version)} failed" - ) - raise - else: - spinner.succeed( - f"Remove {termui.green(key, bold=True)} " - f"{termui.yellow(dist_version)} successful" - ) + + job = progress.add_task( + f"Removing [bold green]{key}[/] " f"[yellow]{dist_version}[/]...", + total=1, + ) + try: + self.manager.uninstall(dist) + except Exception: + progress.live.console.print( + f" [red]{termui.Emoji.FAIL}[/] Remove [bold green]{key}[/] " + f"[yellow]{dist_version}[/] failed", + ) + raise + else: + progress.live.console.print( + f" [green]{termui.Emoji.SUCC}[/] Remove [bold green]{key}[/] " + f"[yellow]{dist_version}[/] successful" + ) + finally: + progress.update(job, completed=1, visible=False) return dist - def _show_headline(self, packages: Dict[str, List[str]]) -> None: + def _show_headline(self, packages: dict[str, list[str]]) -> None: add, update, remove = packages["add"], packages["update"], packages["remove"] if not any((add, update, remove)): - self.ui.echo("All packages are synced to date, nothing to do.\n") + self.ui.echo("All packages are synced to date, nothing to do.") return - results = [termui.bold("Synchronizing working set with lock file:")] + results = ["[bold]Synchronizing working set with lock file[/]:"] results.extend( [ - f"{termui.green(str(len(add)))} to add,", - f"{termui.yellow(str(len(update)))} to update,", - f"{termui.red(str(len(remove)))} to remove", + f"[green]{len(add)}[/] to add,", + f"[yellow]{len(update)}[/] to update,", + f"[red]{len(remove)}[/] to remove", ] ) self.ui.echo(" ".join(results) + "\n") - def _show_summary(self, packages: Dict[str, List[str]]) -> None: + def _show_summary(self, packages: dict[str, list[str]]) -> None: to_add = [self.candidates[key] for key in packages["add"]] to_update = [ (self.working_set[key], self.candidates[key]) for key in packages["update"] @@ -285,22 +310,22 @@ def _show_summary(self, packages: Dict[str, List[str]]) -> None: to_remove = [self.working_set[key] for key in packages["remove"]] lines = [] if to_add: - lines.append(termui.bold("Packages to add:")) + lines.append("[bold]Packages to add[/]:") for can in to_add: lines.append(f" - {can.format()}") if to_update: - lines.append(termui.bold("Packages to update:")) + lines.append("[bold]Packages to update[/]:") for prev, cur in to_update: lines.append( - f" - {termui.green(cur.name, bold=True)} " - f"{termui.yellow(prev.version)} -> {termui.yellow(cur.version)}" + f" - [bold green]{cur.name}[/] " + f"[yellow]{prev.version}[/] -> [yellow]{cur.version}[/]" ) if to_remove: - lines.append(termui.bold("Packages to remove:")) + lines.append("[bold]Packages to remove[/]:") for dist in to_remove: lines.append( - f" - {termui.green(dist.metadata['Name'], bold=True)} " - f"{termui.yellow(dist.version)}" + f" - [bold green]{dist.metadata['Name']}[/] " + f"[yellow]{dist.version}[/]" ) if lines: self.ui.echo("\n".join(lines)) @@ -333,44 +358,46 @@ def synchronize(self) -> None: else: parallel_jobs.append((kind, key)) - errors: List[str] = [] - failed_jobs: List[Tuple[str, str]] = [] + errors: list[str] = [] + failed_jobs: list[tuple[str, str]] = [] - def update_progress( - future: Union[Future, DummyFuture], kind: str, key: str - ) -> None: + def update_progress(future: Future | DummyFuture, kind: str, key: str) -> None: error = future.exception() if error: exc_info = (type(error), error, error.__traceback__) termui.logger.exception("Error 
occurs: ", exc_info=exc_info) failed_jobs.append((kind, key)) errors.extend( - [f"{kind} {termui.green(key)} failed:\n"] + [f"{kind} [green]{key}[/] failed:\n"] + traceback.format_exception(*exc_info) ) - with self.ui.logging("install"): - with self.ui.indent(" "): - for job in sequential_jobs: - kind, key = job - handlers[kind](key) - for i in range(self.retry_times + 1): - with self.create_executor() as executor: - for job in parallel_jobs: - kind, key = job - future = executor.submit(handlers[kind], key) - future.add_done_callback( - functools.partial(update_progress, kind=kind, key=key) - ) - if not failed_jobs or i == self.retry_times: - break - parallel_jobs, failed_jobs = failed_jobs, [] - errors.clear() - self.ui.echo("Retry failed jobs") + # get rich progess and live handler to deal with multiple spinners + with self.ui.logging("install"), self.ui.make_progress( + " ", + SpinnerColumn(termui.SPINNER, speed=1, style="bold cyan"), + "{task.description}", + ) as progress: + live = progress.live + for kind, key in sequential_jobs: + handlers[kind](key, progress) + for i in range(self.retry_times + 1): + with self.create_executor() as executor: + for kind, key in parallel_jobs: + future = executor.submit(handlers[kind], key, progress) + future.add_done_callback( + functools.partial(update_progress, kind=kind, key=key) + ) + if not failed_jobs or i == self.retry_times: + break + parallel_jobs, failed_jobs = failed_jobs, [] + errors.clear() + live.console.print("Retry failed jobs") if errors: - self.ui.echo(termui.red("\nERRORS:")) - self.ui.echo("".join(errors), err=True) + if self.ui.verbosity < termui.Verbosity.DETAIL: + live.console.print("\n[red]ERRORS[/]:") + live.console.print("".join(errors), end="") raise InstallationError("Some package operations are not complete yet") if self.install_self: @@ -381,11 +408,10 @@ def update_progress( assert self_key self.candidates[self_key] = self_candidate word = "a" if self.no_editable else "an editable" - self.ui.echo(f"Installing the project as {word} package...") - with self.ui.indent(" "): - if self_key in self.working_set: - self.update_candidate(self_key) - else: - self.install_candidate(self_key) - - self.ui.echo(f"\n{termui.Emoji.SUCC} All complete!") + live.console.print(f"Installing the project as {word} package...") + if self_key in self.working_set: + self.update_candidate(self_key, progress) + else: + self.install_candidate(self_key, progress) + + live.console.print(f"\n{termui.Emoji.POPPER} All complete!") diff --git a/pdm/installers/uninstallers.py b/pdm/installers/uninstallers.py index 622ac67e4d..a0676e678b 100644 --- a/pdm/installers/uninstallers.py +++ b/pdm/installers/uninstallers.py @@ -6,15 +6,17 @@ import shutil from pathlib import Path from tempfile import TemporaryDirectory -from typing import Iterable, Type, TypeVar, cast +from typing import TYPE_CHECKING, Iterable, Type, TypeVar, cast from pdm import termui -from pdm._types import Distribution from pdm.exceptions import UninstallError from pdm.installers.packages import CachedPackage -from pdm.models.environment import Environment from pdm.utils import is_egg_link, is_path_relative_to +if TYPE_CHECKING: + from pdm.compat import Distribution + from pdm.models.environment import Environment + _T = TypeVar("_T", bound="BaseRemovePaths") @@ -266,7 +268,7 @@ def commit(self) -> None: self._stashed.clear() self._saved_pth = None if self.refer_to: - termui.logger.debug("Unlink from cached package %s", self.refer_to) + termui.logger.info("Unlink from cached package %s", 
self.refer_to) CachedPackage(self.refer_to).remove_referrer(os.path.dirname(self.refer_to)) self.refer_to = None diff --git a/pdm/models/auth.py b/pdm/models/auth.py index 8f7d92aa95..b94a47c011 100644 --- a/pdm/models/auth.py +++ b/pdm/models/auth.py @@ -1,16 +1,18 @@ from typing import List, Optional, Tuple -import click +from unearth.auth import MultiDomainBasicAuth from pdm._types import Source from pdm.exceptions import PdmException -from pdm.models.pip_shims import MultiDomainBasicAuth +from pdm.termui import UI try: import keyring except ModuleNotFoundError: keyring = None # type: ignore +ui = UI() + class PdmBasicAuth(MultiDomainBasicAuth): """A custom auth class that differs from Pip's implementation in the @@ -37,16 +39,19 @@ def _prompt_for_password( def _should_save_password_to_keyring(self) -> bool: if keyring is None: - click.secho( + ui.echo( "The provided credentials will not be saved into your system.\n" "You can enable this by installing keyring:\n" " pipx inject pdm keyring\n" "or: pip install --user keyring", err=True, - fg="yellow", + style="yellow", ) return super()._should_save_password_to_keyring() def make_basic_auth(sources: List[Source], prompting: bool) -> PdmBasicAuth: - return PdmBasicAuth(prompting, [source["url"] for source in sources]) + return PdmBasicAuth( + prompting, + [source["url"] for source in sources if source.get("type", "index") == "index"], + ) diff --git a/pdm/models/caches.py b/pdm/models/caches.py index 47026020a1..990a2e33bb 100644 --- a/pdm/models/caches.py +++ b/pdm/models/caches.py @@ -1,18 +1,26 @@ from __future__ import annotations +import contextlib +import dataclasses import hashlib import json +import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from typing import TYPE_CHECKING, BinaryIO, Generic, Iterable, TypeVar, cast + +import requests +from cachecontrol.cache import BaseCache +from cachecontrol.caches import FileCache +from packaging.utils import canonicalize_name, parse_wheel_filename from pdm._types import CandidateInfo -from pdm.exceptions import CorruptedCacheError -from pdm.models import pip_shims +from pdm.exceptions import PdmException from pdm.models.candidates import Candidate -from pdm.utils import open_file +from pdm.termui import logger +from pdm.utils import atomic_open_for_write if TYPE_CHECKING: - from pip._vendor import requests + from unearth import Link, TargetPython KT = TypeVar("KT") VT = TypeVar("VT") @@ -34,7 +42,7 @@ def _read_cache(self) -> None: try: self._cache = json.load(fp) except json.JSONDecodeError: - raise CorruptedCacheError("The dependencies cache seems to be broken.") + return def _write_cache(self) -> None: with self.cache_file.open("w") as fp: @@ -85,7 +93,7 @@ def _get_key(cls, obj: Candidate) -> str: return f"{obj.name}{extras}-{obj.version}" -class HashCache(pip_shims.SafeFileCache): +class HashCache: """Caches hashes of PyPI artifacts so we do not need to re-download them. @@ -94,25 +102,170 @@ class HashCache(pip_shims.SafeFileCache): avoid issues where the location on the server changes. 
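+ Entries are stored as small files under a directory tree derived from the SHA-224 digest of the URL, so cache keys never need escaping (see _get_path_for_key below).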
""" - def __init__(self, *args: Any, **kwargs: Any) -> None: - self.session: requests.Session | None = None - super(HashCache, self).__init__(*args, **kwargs) + FAVORITE_HASH = "sha256" + STRONG_HASHES = ["sha256", "sha384", "sha512"] + + def __init__(self, directory: Path) -> None: + self.directory = directory + + def _read_from_link(self, link: Link, session: requests.Session) -> Iterable[bytes]: + if link.is_file: + with open(link.file_path, "rb") as f: + yield from f + else: + resp = session.get(link.normalized, stream=True) + try: + resp.raise_for_status() + except requests.HTTPError as e: + raise PdmException(f"Failed to read from {link.redacted}: {e}") from e + yield from resp.iter_content(chunk_size=8096) + + def _get_file_hash(self, link: Link, session: requests.Session) -> str: + h = hashlib.new(self.FAVORITE_HASH) + for chunk in self._read_from_link(link, session): + h.update(chunk) + return ":".join([h.name, h.hexdigest()]) - def get_hash(self, link: pip_shims.Link) -> str: + def get_hash(self, link: Link, session: requests.Session) -> str: # If there is no link hash (i.e., md5, sha256, etc.), we don't want # to store it. - hash_value = self.get(link.url) + hash_value = self.get(link.url_without_fragment) if not hash_value: - if link.hash and link.hash_name in pip_shims.STRONG_HASHES: - hash_value = f"{link.hash_name}:{link.hash}".encode() + if link.hash and link.hash_name in self.STRONG_HASHES: + hash_value = f"{link.hash_name}:{link.hash}" else: - hash_value = self._get_file_hash(link).encode() - self.set(link.url, hash_value) - return hash_value.decode("utf8") - - def _get_file_hash(self, link: pip_shims.Link) -> str: - h = hashlib.new(pip_shims.FAVORITE_HASH) - with open_file(link.url, self.session) as fp: - for chunk in iter(lambda: fp.read(8096), b""): - h.update(chunk) - return ":".join([h.name, h.hexdigest()]) + hash_value = self._get_file_hash(link, session) + self.set(link.url_without_fragment, hash_value) + return hash_value + + def _get_path_for_key(self, key: str) -> Path: + hashed = hashlib.sha224(key.encode("utf-8")).hexdigest() + parts = (hashed[:2], hashed[2:4], hashed[4:6], hashed[6:8], hashed[8:]) + return self.directory.joinpath(*parts) + + def get(self, url: str) -> str | None: + path = self._get_path_for_key(url) + with contextlib.suppress(OSError, UnicodeError): + return path.read_text("utf-8").strip() + return None + + def set(self, url: str, hash: str) -> None: + path = self._get_path_for_key(url) + with contextlib.suppress(OSError, UnicodeError): + path.parent.mkdir(parents=True, exist_ok=True) + with atomic_open_for_write(path, encoding="utf-8") as fp: + fp.write(hash) + + +class WheelCache: + """Caches wheels so we do not need to rebuild them. + + Wheels are only cached when the URL contains egg-info or is a VCS repository + with an *immutable* revision. There might be more than one wheels built for + one sdist, the one with most preferred tag will be returned. 
+ """ + + def __init__(self, directory: Path) -> None: + self.directory = directory + + def _get_candidates( + self, link: Link, target_python: TargetPython + ) -> Iterable[Path]: + path = self.get_path_for_link(link, target_python) + if not path.exists(): + return + for candidate in path.iterdir(): + if candidate.name.endswith(".whl"): + yield candidate + + def get_path_for_link(self, link: Link, target_python: TargetPython) -> Path: + hash_key = { + "url": link.url_without_fragment, + # target python participates in the hash key to handle the some cases + # where the sdist produces different wheels on different Pythons, and + # the differences are not encoded in compatibility tags. + "target_python": dataclasses.astuple(target_python), + } + if link.subdirectory: + hash_key["subdirectory"] = link.subdirectory + if link.hash: + hash_key[link.hash_name] = link.hash + hashed = hashlib.sha224( + json.dumps( + hash_key, sort_keys=True, separators=(",", ":"), ensure_ascii=True + ).encode("utf-8") + ).hexdigest() + parts = (hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]) + return self.directory.joinpath(*parts) + + def get( + self, link: Link, project_name: str | None, target_python: TargetPython + ) -> Path | None: + if not project_name: + return None + canonical_name = canonicalize_name(project_name) + tags_priorities = { + tag: i for i, tag in enumerate(target_python.supported_tags()) + } + candidates: list[tuple[int, Path]] = [] + for candidate in self._get_candidates(link, target_python): + try: + name, *_, tags = parse_wheel_filename(candidate.name) + except ValueError: + logger.debug("Ignoring invalid cached wheel %s", candidate.name) + continue + if canonical_name != canonicalize_name(name): + logger.debug( + "Ignoring cached wheel %s with invalid project name %s, " + "expected: %s", + candidate.name, + name, + canonical_name, + ) + continue + if tags.isdisjoint(tags_priorities): + continue + support_min = min( + tags_priorities[tag] for tag in tags if tag in tags_priorities + ) + candidates.append((support_min, candidate)) + if not candidates: + return None + return min(candidates, key=lambda x: x[0])[1] + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory: str) -> None: + super().__init__() + self.directory = directory + + def _get_cache_path(self, name: str) -> str: + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
+ hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> bytes | None: + path = self._get_cache_path(key) + with contextlib.suppress(OSError): + with open(path, "rb") as f: + return f.read() + return None + + def set(self, key: str, value: bytes, expires: int | None = None) -> None: + path = self._get_cache_path(key) + with contextlib.suppress(OSError): + with atomic_open_for_write(path, mode="wb") as f: + cast(BinaryIO, f).write(value) + + def delete(self, key: str) -> None: + path = self._get_cache_path(key) + with contextlib.suppress(OSError): + os.remove(path) diff --git a/pdm/models/candidates.py b/pdm/models/candidates.py index 2b176636a5..5f781769e9 100644 --- a/pdm/models/candidates.py +++ b/pdm/models/candidates.py @@ -2,25 +2,28 @@ import os import re -import sys from pathlib import Path +from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Any, cast, no_type_check from zipfile import ZipFile +from packaging.utils import parse_wheel_filename +from unearth import Link, vcs_support + from pdm import termui from pdm.builders import EditableBuilder, WheelBuilder +from pdm.compat import importlib_metadata as im from pdm.exceptions import BuildError, CandidateNotFound -from pdm.models import pip_shims from pdm.models.requirements import ( FileRequirement, Requirement, VcsRequirement, _egg_info_re, filter_requirements_with_extras, - parse_metadata_from_source, ) +from pdm.models.setup import Setup +from pdm.project.metadata import MutableMetadata, SetupDistribution from pdm.utils import ( - allow_all_wheels, cached_property, convert_hashes, create_tracked_tempdir, @@ -29,19 +32,14 @@ get_venv_like_prefix, normalize_name, path_replace, - populate_link, + path_to_url, url_without_fragments, ) -if sys.version_info >= (3, 8): - from importlib.metadata import Distribution, PathDistribution -else: - from importlib_metadata import Distribution, PathDistribution - if TYPE_CHECKING: - from pdm.models.environment import Environment + from unearth import Package, PackageFinder -vcs = pip_shims.VcsSupport() + from pdm.models.environment import Environment def _dist_info_files(whl_zip: ZipFile) -> list[str]: @@ -56,7 +54,7 @@ def _dist_info_files(whl_zip: ZipFile) -> list[str]: raise Exception("No .dist-info folder found in wheel") -def _get_wheel_metadata_from_wheel(whl_file: str, metadata_directory: str) -> str: +def _get_wheel_metadata_from_wheel(whl_file: Path, metadata_directory: str) -> str: """Extract the metadata from a wheel. Fallback for when the build backend does not define the 'get_wheel_metadata' hook. @@ -72,6 +70,16 @@ def _filter_none(data: dict[str, Any]) -> dict[str, Any]: return {k: v for k, v in data.items() if v is not None} +def _find_best_match_link( + finder: PackageFinder, req: Requirement, hashes: dict[str, list[str]] | None +) -> Link | None: + """Get the best matching link for a requirement.""" + # This function is called when a candidate from the lock file is given or + # the wheel is incompatible. In either case the requirement must be pinned, + # so there is no need to pass allow_prereleases. + best = finder.find_best_match(req.as_line(), hashes=hashes).best + return best.link if best is not None else None + + class Candidate: """A concrete candidate that can be downloaded and installed.
A candidate comes from the PyPI index of a package, or from the requirement itself @@ -84,7 +92,7 @@ def __init__( req: Requirement, name: str | None = None, version: str | None = None, - link: pip_shims.Link | None = None, + link: Link | None = None, ): """ :param req: the requirement that produces this candidate. @@ -95,7 +103,9 @@ def __init__( """ self.req = req self.name = name or self.req.project_name - self.version = version or self.req.version + self.version = version + if link is None and not req.is_named: + link = req.as_file_link() # type: ignore self.link = link self.summary = "" self.hashes: dict[str, str] | None = None @@ -133,9 +143,9 @@ def __repr__(self) -> str: @classmethod def from_installation_candidate( - cls, candidate: pip_shims.InstallationCandidate, req: Requirement + cls, candidate: Package, req: Requirement ) -> Candidate: - """Build a candidate from pip's InstallationCandidate.""" + """Build a candidate from unearth's find result.""" return cls( req, name=candidate.name, @@ -192,10 +202,7 @@ def as_lockfile_entry(self, project_root: Path) -> dict[str, Any]: def format(self) -> str: """Format for output.""" - return ( - f"{termui.green(self.name, bold=True)} " - f"{termui.yellow(str(self.version))}" - ) + return f"[bold green]{self.name}[/] [yellow]{self.version}[/]" def prepare(self, environment: Environment) -> PreparedCandidate: """Prepare the candidate for installation.""" @@ -212,55 +219,52 @@ class PreparedCandidate: def __init__(self, candidate: Candidate, environment: Environment) -> None: self.candidate = candidate self.environment = environment - self.wheel: str | None = None self.req = candidate.req - self.ireq = self.get_ireq() + self.wheel: Path | None = None + self.link = self._replace_url_vars(self.candidate.link) + + self._source_dir: Path | None = None + self._unpacked_dir: Path | None = None self._metadata_dir: str | None = None - self._metadata: Distribution | None = None - - def get_ireq(self) -> pip_shims.InstallRequirement: - rv, project = self.req.as_ireq(), self.environment.project - if rv.link: - rv.original_link = rv.link = pip_shims.Link( - expand_env_vars_in_auth( - rv.link.url.replace( - "${PROJECT_ROOT}", - project.root.as_posix().lstrip("/"), # type: ignore - ) - ) - ) - if rv.source_dir: - rv.source_dir = os.path.normpath(os.path.abspath(rv.link.file_path)) - if rv.local_file_path: - rv.local_file_path = rv.link.file_path - elif self.candidate.link: - rv.link = rv.original_link = self.candidate.link - return rv + self._metadata: im.Distribution | None = None + + if self.link is not None and self.link.is_file and self.link.file_path.is_dir(): + self._source_dir = self.link.file_path + self._unpacked_dir = self._source_dir / (self.link.subdirectory or "") + + def _replace_url_vars(self, link: Link | None) -> Link | None: + if link is None: + return None + project_root = self.environment.project.root.as_posix() # type: ignore + url = expand_env_vars_in_auth(link.normalized).replace( + "${PROJECT_ROOT}", project_root.lstrip("/") + ) + return Link(url) @cached_property def revision(self) -> str: - if not (self.ireq.source_dir and os.path.exists(self.ireq.source_dir)): + if not (self._source_dir and os.path.exists(self._source_dir)): # It happens because the cached wheel is hit and the source code isn't # pulled to local. In this case the link url must contain the full commit # hash which can be taken as the revision safely. 
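# (e.g. a pinned link like git+https://host/repo@<commit-hash> names the revision itself)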
# See more info at https://github.com/pdm-project/pdm/issues/349 - rev = get_rev_from_url(self.ireq.original_link.url) # type: ignore + rev = get_rev_from_url(self.candidate.link.url) # type: ignore if rev: return rev - return vcs.get_backend(self.req.vcs).get_revision( # type: ignore - cast(str, self.ireq.source_dir) - ) + return vcs_support.get_backend( + self.req.vcs, self.environment.project.core.ui.verbosity # type: ignore + ).get_revision(cast(Path, self._source_dir)) def direct_url(self) -> dict[str, Any] | None: """PEP 610 direct_url.json data""" req = self.req if isinstance(req, VcsRequirement): if req.editable: - assert self.ireq.source_dir + assert self._source_dir return _filter_none( { - "url": pip_shims.path_to_url(self.ireq.source_dir), + "url": path_to_url(self._source_dir.as_posix()), "dir_info": {"editable": True}, "subdirectory": req.subdirectory, } @@ -297,14 +301,13 @@ def direct_url(self) -> dict[str, Any] | None: ) with self.environment.get_finder() as finder: hash_cache = self.environment.project.make_hash_cache() - hash_cache.session = finder.session # type: ignore return _filter_none( { - "url": url_without_fragments(url), + "url": url_without_fragments(req.url), "archive_info": { - "hash": hash_cache.get_hash(pip_shims.Link(url)).replace( - ":", "=" - ) + "hash": hash_cache.get_hash( + Link(url), finder.session + ).replace(":", "=") }, "subdirectory": req.subdirectory, } @@ -312,23 +315,25 @@ def direct_url(self) -> dict[str, Any] | None: else: return None - def build(self) -> str: + def build(self) -> Path: """Call PEP 517 build hook to build the candidate into a wheel""" self.obtain(allow_all=False) if self.wheel: return self.wheel cached = self._get_cached_wheel() if cached: - self.wheel = cached.file_path + self.wheel = cached return self.wheel # type: ignore - assert self.ireq.source_dir, "Source directory isn't ready yet" + assert self._source_dir, "Source directory isn't ready yet" builder_cls = EditableBuilder if self.req.editable else WheelBuilder - builder = builder_cls(self.ireq.unpacked_source_directory, self.environment) + builder = builder_cls(str(self._unpacked_dir), self.environment) build_dir = self._get_wheel_dir() if not os.path.exists(build_dir): os.makedirs(build_dir) - termui.logger.debug("Building wheel for %s", self.ireq.link) - self.wheel = builder.build(build_dir, metadata_directory=self._metadata_dir) + termui.logger.info("Building wheel for %s", self.link) + self.wheel = Path( + builder.build(build_dir, metadata_directory=self._metadata_dir) + ) return self.wheel def obtain(self, allow_all: bool = False) -> None: @@ -336,88 +341,122 @@ def obtain(self, allow_all: bool = False) -> None: :param allow_all: If true, don't validate the wheel tag nor hashes """ - ireq = self.ireq if self.wheel: - if self._wheel_compatible(self.wheel, allow_all): + if self._wheel_compatible(self.wheel.name, allow_all): return - elif ireq.source_dir: + elif self._source_dir and self._source_dir.exists(): return - + hash_options = None if not allow_all and self.candidate.hashes: - ireq.hash_options = convert_hashes(self.candidate.hashes) - with self.environment.get_finder(ignore_requires_python=True) as finder: + hash_options = convert_hashes(self.candidate.hashes) + with self.environment.get_finder(ignore_compatibility=allow_all) as finder: if ( - not ireq.link - or ireq.link.is_wheel - and not self._wheel_compatible(ireq.link.filename, allow_all) + not self.link + or self.link.is_wheel + and not self._wheel_compatible(self.link.filename, allow_all) ): - 
ireq.link = self.wheel = None # reset the incompatible wheel - with allow_all_wheels(allow_all): - ireq.link = populate_link(finder, ireq, False) - if not ireq.link: - raise CandidateNotFound("No candidate is found for %s", self) - if not ireq.original_link: - ireq.original_link = ireq.link + if self.req.is_file_or_url: + raise CandidateNotFound( + f"The URL requirement {self.req.as_line()} is a wheel but " + "incompatible" + ) + self.link = self.wheel = None # reset the incompatible wheel + self.link = _find_best_match_link( + finder, + self.req.as_pinned_version(self.candidate.version), + hash_options, + ) + if not self.link: + raise CandidateNotFound( + f"No candidate is found for `{self.req.project_name}` " + "that matches the environment or hashes" + ) + if not self.candidate.link: + self.candidate.link = self.link if allow_all and not self.req.editable: cached = self._get_cached_wheel() if cached: - self.wheel = cached.file_path + self.wheel = cached return - downloader = pip_shims.Downloader(finder.session, "off") # type: ignore - self._populate_source_dir() - if not ireq.link.is_existing_dir(): - assert ireq.source_dir - downloaded = pip_shims.unpack_url( # type: ignore - ireq.link, - ireq.source_dir, - downloader, - hashes=ireq.hashes(False), + with TemporaryDirectory(prefix="pdm-download-") as tmpdir: + build_dir = self._get_build_dir() + if self.link.is_wheel: + download_dir = build_dir + else: + download_dir = tmpdir + result = finder.download_and_unpack( + self.link, build_dir, download_dir, hash_options ) - if ireq.link.is_wheel: - assert downloaded - self.wheel = downloaded.path - return + if self.link.is_wheel: + self.wheel = result + else: + self._source_dir = Path(build_dir) + self._unpacked_dir = result - def prepare_metadata(self) -> Distribution: - """Prepare the metadata for the candidate. - Will call the prepare_metadata_* hooks behind the scene - """ + def prepare_metadata(self) -> im.Distribution: self.obtain(allow_all=True) metadir_parent = create_tracked_tempdir(prefix="pdm-meta-") - result: Distribution if self.wheel: + # Get metadata from METADATA inside the wheel self._metadata_dir = _get_wheel_metadata_from_wheel( self.wheel, metadir_parent ) - result = PathDistribution(Path(self._metadata_dir)) - else: - source_dir = self.ireq.unpacked_source_directory - builder = EditableBuilder if self.req.editable else WheelBuilder + return im.PathDistribution(Path(self._metadata_dir)) + + assert self._unpacked_dir, "Source directory isn't ready yet" + # Try getting from PEP 621 metadata + pyproject_toml = self._unpacked_dir / "pyproject.toml" + if pyproject_toml.exists(): try: - self._metadata_dir = builder( - source_dir, self.environment - ).prepare_metadata(metadir_parent) - except BuildError: - termui.logger.warn( - "Failed to build package, try parsing project files." 
- ) - result = parse_metadata_from_source(source_dir) + metadata = MutableMetadata.from_file(pyproject_toml) + except ValueError: + termui.logger.warn("Failed to parse pyproject.toml") else: - result = PathDistribution(Path(self._metadata_dir)) - if not self.candidate.name: - self.req.name = self.candidate.name = cast(str, result.metadata["Name"]) - if not self.candidate.version: - self.candidate.version = result.version - if not self.candidate.requires_python: - self.candidate.requires_python = cast( - str, result.metadata.get("Requires-Python", "") - ) - return result + dynamic_fields = metadata.dynamic or [] + # Use the parse result only when all are static + if set(dynamic_fields).isdisjoint( + { + "name", + "version", + "dependencies", + "optional-dependencies", + "requires-python", + } + ): + setup = Setup( + name=metadata.name, + version=metadata.version, + install_requires=metadata.dependencies or [], + extras_require=metadata.optional_dependencies or {}, + python_requires=metadata.requires_python or None, + ) + return SetupDistribution(setup) + # If all fail, try building the source to get the metadata + builder = EditableBuilder if self.req.editable else WheelBuilder + try: + self._metadata_dir = builder( + self._unpacked_dir, self.environment + ).prepare_metadata(metadir_parent) + except BuildError: + termui.logger.warn("Failed to build package, try parsing project files.") + setup = Setup.from_directory(self._unpacked_dir) + return SetupDistribution(setup) + else: + return im.PathDistribution(Path(self._metadata_dir)) @property - def metadata(self) -> Distribution: + def metadata(self) -> im.Distribution: if self._metadata is None: - self._metadata = self.prepare_metadata() + result = self.prepare_metadata() + if not self.candidate.name: + self.req.name = self.candidate.name = cast(str, result.metadata["Name"]) + if not self.candidate.version: + self.candidate.version = result.version + if not self.candidate.requires_python: + self.candidate.requires_python = cast( + str, result.metadata.get("Requires-Python", "") + ) + self._metadata = result return self._metadata def get_dependencies_from_metadata(self) -> list[str]: @@ -429,44 +468,44 @@ def get_dependencies_from_metadata(self) -> list[str]: def should_cache(self) -> bool: """Determine whether to cache the dependencies and built wheel.""" - link, source_dir = self.ireq.original_link, self.ireq.source_dir - if self.req.is_vcs and not self.req.editable: + link, source_dir = self.candidate.link, self._source_dir + if self.req.editable: + return False + if self.req.is_named: + return True + if self.req.is_vcs: if not source_dir: # If the candidate isn't prepared, we can't cache it return False - vcs = pip_shims.VcsSupport() assert link - vcs_backend = vcs.get_backend_for_scheme(link.scheme) - return bool( - vcs_backend - and vcs_backend.is_immutable_rev_checkout(link.url, source_dir) + vcs_backend = vcs_support.get_backend( + link.vcs, self.environment.project.core.ui.verbosity ) - elif self.req.is_named: - return True - elif link and not link.is_existing_dir(): - base, _ = link.splitext() + return vcs_backend.is_immutable_revision(source_dir, link) + if link and not (link.is_file and link.file_path.is_dir()): # Cache if the link contains egg-info like 'foo-1.0' - return _egg_info_re.search(base) is not None + return _egg_info_re.search(link.filename) is not None return False - def _get_cached_wheel(self) -> pip_shims.Link | None: + def _get_cached_wheel(self) -> Path | None: wheel_cache = 
self.environment.project.make_wheel_cache() - supported_tags = pip_shims.get_supported(self.environment.interpreter.for_tag()) - assert self.ireq.original_link - cache_entry = wheel_cache.get_cache_entry( - self.ireq.original_link, cast(str, self.req.project_name), supported_tags + assert self.candidate.link + cache_entry = wheel_cache.get( + self.candidate.link, self.candidate.name, self.environment.target_python ) if cache_entry is not None: - termui.logger.debug("Using cached wheel link: %s", cache_entry.link) - return cache_entry.link - return None - - def _populate_source_dir(self) -> None: - ireq = self.ireq - assert ireq.original_link - if ireq.original_link.is_existing_dir(): - ireq.source_dir = ireq.original_link.file_path - elif self.req.editable: + termui.logger.info("Using cached wheel: %s", cache_entry) + return cache_entry + + def _get_build_dir(self) -> str: + original_link = self.candidate.link + assert original_link + if original_link.is_file and original_link.file_path.is_dir(): + # Local directories are built in tree + return str(original_link.file_path) + if self.req.editable: + # In this branch the requirement must be an editable VCS requirement. + # The repository will be unpacked into a *persistent* src directory. if self.environment.packages_path: src_dir = self.environment.packages_path / "src" else: @@ -477,24 +516,28 @@ def _populate_source_dir(self) -> None: src_dir = venv_prefix / "src" else: src_dir = Path("src") - if not src_dir.is_dir(): - src_dir.mkdir() - ireq.ensure_has_source_dir(str(src_dir)) - elif not ireq.source_dir: - ireq.source_dir = create_tracked_tempdir(prefix="pdm-build-") + src_dir.mkdir(exist_ok=True, parents=True) + dirname = self.candidate.name or self.req.name + if not dirname: + dirname, _ = os.path.splitext(original_link.filename) + return str(src_dir / str(dirname)) + # Otherwise, for source dists, they will be unpacked into a *temp* directory. 
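+ # The "tracked" temp dir is expected to be cleaned up automatically on exit.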
+ return create_tracked_tempdir(prefix="pdm-build-") def _wheel_compatible(self, wheel_file: str, allow_all: bool = False) -> bool: if allow_all: return True - supported_tags = pip_shims.get_supported(self.environment.interpreter.for_tag()) - return pip_shims.PipWheel(os.path.basename(wheel_file)).supported( - supported_tags - ) + supported_tags = self.environment.target_python.supported_tags() + file_tags = parse_wheel_filename(wheel_file)[-1] + return not file_tags.isdisjoint(supported_tags) def _get_wheel_dir(self) -> str: - assert self.ireq.original_link + assert self.candidate.link if self.should_cache(): + termui.logger.info("Saving wheel to cache: %s", self.candidate.link) wheel_cache = self.environment.project.make_wheel_cache() - return wheel_cache.get_path_for_link(self.ireq.original_link) + return wheel_cache.get_path_for_link( + self.candidate.link, self.environment.target_python + ).as_posix() else: return create_tracked_tempdir(prefix="pdm-wheel-") diff --git a/pdm/models/environment.py b/pdm/models/environment.py index 8cc27c7cc6..8a8be0c836 100644 --- a/pdm/models/environment.py +++ b/pdm/models/environment.py @@ -11,9 +11,10 @@ from pathlib import Path from typing import TYPE_CHECKING, Generator +import unearth + from pdm import termui from pdm.exceptions import BuildError -from pdm.models import pip_shims from pdm.models.auth import make_basic_auth from pdm.models.in_process import ( get_pep508_environment, @@ -21,8 +22,9 @@ get_sys_config_paths, ) from pdm.models.python import PythonInfo +from pdm.models.session import PDMSession from pdm.models.working_set import WorkingSet -from pdm.utils import cached_property, get_finder, is_venv_python, pdm_scheme +from pdm.utils import cached_property, get_index_urls, is_venv_python, pdm_scheme if TYPE_CHECKING: from pdm._types import Source @@ -75,7 +77,8 @@ def __init__(self, project: Project) -> None: self.project = project self.interpreter = project.python self.auth = make_basic_auth( - self.project.sources, self.project.core.ui.verbosity >= termui.DETAIL + self.project.sources, + self.project.core.ui.verbosity >= termui.Verbosity.DETAIL, ) def get_paths(self) -> dict[str, str]: @@ -102,33 +105,52 @@ def packages_path(self) -> Path: pypackages.joinpath(subdir).mkdir(exist_ok=True, parents=True) return pypackages + @cached_property + def target_python(self) -> unearth.TargetPython: + # TODO: get abi, platform and impl from subprocess + python_version = self.interpreter.version_tuple + python_abi_tag = get_python_abi_tag(str(self.interpreter.executable)) + return unearth.TargetPython(python_version, [python_abi_tag]) + @contextmanager def get_finder( self, sources: list[Source] | None = None, - ignore_requires_python: bool = False, - ) -> Generator[pip_shims.PackageFinder, None, None]: + ignore_compatibility: bool = False, + ) -> Generator[unearth.PackageFinder, None, None]: """Return the package finder of given index sources. :param sources: a list of sources the finder should search in. - :param ignore_requires_python: whether to ignore the python version constraint. + :param ignore_compatibility: whether to ignore the python version + and wheel tags. 
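+ Compatibility is ignored when only the metadata of a candidate is needed, in which case any wheel of the right version will do.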
""" if sources is None: sources = self.project.sources - python_version = self.interpreter.version_tuple - python_abi_tag = get_python_abi_tag(str(self.interpreter.executable)) - finder = get_finder( - sources, - self.project.cache_dir.as_posix(), - python_version, - python_abi_tag, - ignore_requires_python, + index_urls, find_links, trusted_hosts = get_index_urls(sources) + session = PDMSession( + cache_dir=self.project.cache("http"), + index_urls=index_urls, + trusted_hosts=trusted_hosts, ) - # Reuse the auth across sessions to avoid prompting repeatedly. - finder.session.auth = self.auth # type: ignore - yield finder - finder.session.close() # type: ignore + session.auth = self.auth + finder = unearth.PackageFinder( + session=session, + index_urls=index_urls, + find_links=find_links, + target_python=self.target_python, + ignore_compatibility=ignore_compatibility, + no_binary=os.getenv("PDM_NO_BINARY", "").split(","), + only_binary=os.getenv("PDM_ONLY_BINARY", "").split(","), + respect_source_order=self.project.tool_settings.get("resolution", {}).get( + "respect-source-order", False + ), + verbosity=self.project.core.ui.verbosity, + ) + try: + yield finder + finally: + session.close() def get_working_set(self) -> WorkingSet: """Get the working set based on local packages directory.""" @@ -165,33 +187,30 @@ def update_shebangs(self, new_path: str) -> None: child.write_bytes(_replace_shebang(child.read_bytes(), new_shebang)) def _download_pip_wheel(self, path: str | Path) -> None: - dirname = Path(tempfile.mkdtemp(prefix="pip-download-")) - try: - subprocess.check_call( - [ - getattr(sys, "_original_executable", sys.executable), - "-m", - "pip", - "download", - "--only-binary=:all:", - "-d", - str(dirname), - "pip<21", # pip>=21 drops the support of py27 - ], - ) - wheel_file = next(dirname.glob("pip-*.whl")) - shutil.move(str(wheel_file), path) - except subprocess.CalledProcessError: - raise BuildError("Failed to download pip for the given interpreter") - finally: - shutil.rmtree(dirname, ignore_errors=True) + download_error = BuildError("Can't get a working copy of pip for the project") + with self.get_finder([self.project.default_source]) as finder: + finder.only_binary = ["pip"] + best_match = finder.find_best_match("pip").best + if not best_match: + raise download_error + with tempfile.TemporaryDirectory(prefix="pip-download-") as dirname: + try: + downloaded = finder.download_and_unpack( + best_match.link, dirname, dirname + ) + except unearth.UnpackError: + raise download_error + shutil.move(str(downloaded), path) @cached_property def pip_command(self) -> list[str]: """Get a pip command for this environment, and download one if not available. Return a list of args like ['python', '-m', 'pip'] """ - from pip import __file__ as pip_location + try: + from pip import __file__ as pip_location + except ModuleNotFoundError: + pip_location = None # type: ignore python_major = self.interpreter.major executable = str(self.interpreter.executable) @@ -200,15 +219,20 @@ def pip_command(self) -> list[str]: ) if proc.returncode == 0: # The pip has already been installed with the executable, just use it - return [executable, "-Esm", "pip"] - if python_major == 3: - # Use the host pip package. - return [executable, "-Es", os.path.dirname(pip_location)] - # For py2, only pip<21 is eligible, download a pip wheel from the Internet. 
- pip_wheel = self.project.cache_dir / "pip.whl" - if not pip_wheel.is_file(): - self._download_pip_wheel(pip_wheel) - return [executable, str(pip_wheel / "pip")] + command = [executable, "-Esm", "pip"] + elif python_major == 3 and pip_location: + # Use the host pip package if available + command = [executable, "-Es", os.path.dirname(pip_location)] + else: + # Otherwise, download a pip wheel from the Internet. + pip_wheel = self.project.cache_dir / "pip.whl" + if not pip_wheel.is_file(): + self._download_pip_wheel(pip_wheel) + command = [executable, str(pip_wheel / "pip")] + verbosity = self.project.core.ui.verbosity + if verbosity > 0: + command.append("-" + "v" * verbosity) + return command class GlobalEnvironment(Environment): @@ -244,11 +268,12 @@ def __init__(self, project: Project) -> None: self.project = project self.interpreter = PythonInfo.from_path(sys.executable) self.auth = make_basic_auth( - self.project.sources, self.project.core.ui.verbosity >= termui.DETAIL + self.project.sources, + self.project.core.ui.verbosity >= termui.Verbosity.DETAIL, ) def get_working_set(self) -> WorkingSet: if self.project.project_config.config_file.exists(): - return super().get_working_set() + return self.project.get_environment().get_working_set() else: return WorkingSet([]) diff --git a/pdm/models/in_process/__init__.py b/pdm/models/in_process/__init__.py index bd8b57480e..2687940a48 100644 --- a/pdm/models/in_process/__init__.py +++ b/pdm/models/in_process/__init__.py @@ -6,7 +6,7 @@ import os import subprocess from pathlib import Path -from typing import Dict, Optional +from typing import Any, Dict, Optional FOLDER_PATH = Path(__file__).parent @@ -35,3 +35,9 @@ def get_pep508_environment(executable: str) -> Dict[str, str]: script = str(FOLDER_PATH / "pep508.py") args = [executable, "-Es", script] return json.loads(subprocess.check_output(args)) + + +def parse_setup_py(executable: str, path: str) -> Dict[str, Any]: + """Parse setup.py and return the kwargs""" + cmd = [executable, "-Es", str(FOLDER_PATH / "parse_setup.py"), path] + return json.loads(subprocess.check_output(cmd)) diff --git a/pdm/models/in_process/parse_setup.py b/pdm/models/in_process/parse_setup.py new file mode 100644 index 0000000000..a36d0c206f --- /dev/null +++ b/pdm/models/in_process/parse_setup.py @@ -0,0 +1,211 @@ +import os +import sys +from typing import Any, Dict + + +def _parse_setup_cfg(path: str) -> Dict[str, Any]: + import configparser + + setup_cfg = configparser.ConfigParser() + setup_cfg.read(path, encoding="utf-8") + + result: Dict[str, Any] = {} + if not setup_cfg.has_section("metadata"): + return result + + metadata = setup_cfg["metadata"] + + if "name" in metadata: + result["name"] = metadata["name"] + + if "description" in metadata: + result["description"] = metadata["description"] + + if "license" in metadata: + result["license"] = metadata["license"] + + if "author" in metadata: + result["author"] = metadata["author"] + + if "author_email" in metadata: + result["author_email"] = metadata["author_email"] + + if "maintainer" in metadata: + result["maintainer"] = metadata["maintainer"] + + if "maintainer_email" in metadata: + result["maintainer_email"] = metadata["maintainer_email"] + + if "keywords" in metadata: + keywords = metadata["keywords"].strip().splitlines() + result["keywords"] = keywords if len(keywords) > 1 else keywords[0] + + if "classifiers" in metadata: + result["classifiers"] = metadata["classifiers"].strip().splitlines() + + if "url" in metadata: + result["url"] = metadata["url"] + + if 
"download_url" in metadata: + result["download_url"] = metadata["download_url"] + + if "project_urls" in metadata: + result["project_urls"] = dict( + [u.strip() for u in url.split("=", 1)] + for url in metadata["project_urls"].strip().splitlines() + ) + + if "long_description" in metadata: + long_description = metadata["long_description"].strip() + if long_description.startswith("file:"): + result["readme"] = long_description[5:].strip() + + if setup_cfg.has_section("options"): + options = setup_cfg["options"] + + if "python_requires" in options: + result["python_requires"] = options["python_requires"] + + if "install_requires" in options: + result["install_requires"] = ( + options["install_requires"].strip().splitlines() + ) + + if "package_dir" in options: + result["package_dir"] = dict( + [p.strip() for p in d.split("=", 1)] + for d in options["package_dir"].strip().splitlines() + ) + + if setup_cfg.has_section("options.extras_require"): + result["extras_require"] = { + feature: dependencies.strip().splitlines() + for feature, dependencies in setup_cfg["options.extras_require"].items() + } + + if setup_cfg.has_section("options.entry_points"): + result["entry_points"] = { + entry_point: definitions.strip().splitlines() + for entry_point, definitions in setup_cfg["options.entry_points"].items() + } + + return result + + +setup_kwargs = {} +SUPPORTED_ARGS = ( + "name", + "version", + "description", + "license", + "author", + "author_email", + "maintainer", + "maintainer_email", + "keywords", + "classifiers", + "url", + "download_url", + "project_urls", + "python_requires", + "install_requires", + "extras_require", + "entry_points", + "package_dir", +) + + +def fake_setup(**kwargs): + setup_kwargs.update((k, v) for k, v in kwargs.items() if k in SUPPORTED_ARGS) + + +def clean_metadata(metadata: Dict[str, Any]) -> None: + author = {} + if "author" in metadata: + author["name"] = metadata.pop("author") + if "author_email" in metadata: + author["email"] = metadata.pop("author_email") + if author: + metadata["authors"] = [author] + maintainer = {} + if "maintainer" in metadata: + maintainer["name"] = metadata.pop("maintainer") + if "maintainer_email" in metadata: + maintainer["email"] = metadata.pop("maintainer_email") + if maintainer: + metadata["maintainers"] = [maintainer] + + urls = {} + if "url" in metadata: + urls["Homepage"] = metadata.pop("url") + if "download_url" in metadata: + urls["Downloads"] = metadata.pop("download_url") + if "project_urls" in metadata: + urls.update(metadata.pop("project_urls")) + if urls: + metadata["urls"] = urls + + if "" in metadata.get("package_dir", {}): + metadata["package_dir"] = metadata["package_dir"][""] + + if "keywords" in metadata: + keywords = metadata["keywords"] + if isinstance(keywords, str): + keywords = [k.strip() for k in keywords.split(",")] + metadata["keywords"] = keywords + + if "entry_points" in metadata and isinstance(metadata["entry_points"], dict): + entry_points = {} + for entry_point, definitions in metadata["entry_points"].items(): + if isinstance(definitions, str): + definitions = [definitions] + definitions = dict( + sorted(d.replace(" ", "").split("=", 1) for d in definitions) + ) + + entry_points[entry_point] = definitions + if entry_points: + metadata["entry_points"] = dict(sorted(entry_points.items())) + + +def parse_setup(path: str) -> Dict[str, Any]: + import tokenize + + import setuptools + + setuptools.setup = fake_setup + + parsed: Dict[str, Any] = {} + path = os.path.abspath(path) + project_path = os.path.dirname(path) 
+ os.chdir(project_path) + if os.path.exists("setup.cfg"): + parsed.update(_parse_setup_cfg("setup.cfg")) + + # Execute setup.py and get the kwargs + __file__ = sys.argv[0] = path + sys.path.insert(0, project_path) + setup_kwargs.clear() + + with tokenize.open(path) as f: + code = f.read() + exec( + compile(code, __file__, "exec"), + {"__name__": "__main__", "__file__": __file__, "setup_kwargs": setup_kwargs}, + ) + parsed.update(setup_kwargs) + + if "readme" not in parsed: + for readme_file in ("README.md", "README.rst", "README.txt"): + readme_path = os.path.join(project_path, readme_file) + if os.path.exists(readme_path): + parsed["readme"] = readme_file + break + clean_metadata(parsed) + return parsed + + +if __name__ == "__main__": + import json + + print(json.dumps(parse_setup(sys.argv[1]))) diff --git a/pdm/models/markers.py b/pdm/models/markers.py index 797a7708f5..b18a575e4a 100644 --- a/pdm/models/markers.py +++ b/pdm/models/markers.py @@ -4,7 +4,7 @@ from functools import reduce from typing import Any, List, Optional, Set, Tuple, Union -from pip._vendor.packaging.markers import Marker as PackageMarker +from packaging.markers import Marker as PackageMarker from pdm.models.specifiers import PySpecSet from pdm.utils import join_list_with diff --git a/pdm/models/pip_shims.py b/pdm/models/pip_shims.py deleted file mode 100644 index 929a211ea3..0000000000 --- a/pdm/models/pip_shims.py +++ /dev/null @@ -1,86 +0,0 @@ -# flake8: noqa -""" -This module provides a middle layer between pdm and pip. -All pip members are imported here for compatibility purpose. -""" -from __future__ import annotations - -import atexit -import inspect -from functools import partial -from typing import TYPE_CHECKING, Any, Optional, Tuple - -from pip._internal.cache import WheelCache -from pip._internal.commands.install import InstallCommand as _InstallCommand -from pip._internal.index.package_finder import PackageFinder -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.format_control import FormatControl -from pip._internal.models.link import Link -from pip._internal.models.target_python import TargetPython -from pip._internal.models.wheel import Wheel as PipWheel -from pip._internal.network.auth import MultiDomainBasicAuth -from pip._internal.network.cache import SafeFileCache -from pip._internal.network.download import Downloader -from pip._internal.operations.prepare import unpack_url -from pip._internal.req import InstallRequirement, req_uninstall -from pip._internal.req.constructors import ( - install_req_from_editable, - install_req_from_line, - install_req_from_parsed_requirement, -) -from pip._internal.req.req_file import parse_requirements -from pip._internal.utils import logging as pip_logging -from pip._internal.utils import misc -from pip._internal.utils.compatibility_tags import get_supported -from pip._internal.utils.filesystem import directory_size, file_size, find_files -from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES -from pip._internal.utils.temp_dir import global_tempdir_manager -from pip._internal.utils.urls import path_to_url, url_to_path -from pip._internal.vcs.versioncontrol import VcsSupport - -if TYPE_CHECKING: - from optparse import Values - - -if "verbosity" in inspect.getfullargspec(unpack_url).args: - unpack_url = partial(unpack_url, verbosity=0) - - -class InstallCommand(_InstallCommand): - def __init__(self) -> None: - super().__init__(name="InstallCommand", summary="Install packages.") - - -def get_package_finder( - 
install_cmd: InstallCommand, - options: Optional[Values] = None, - python_version: Optional[Tuple[int, ...]] = None, - python_abi_tag: Optional[str] = None, - ignore_requires_python: Optional[bool] = None, -) -> PackageFinder: - """Shim for compatibility to generate package finders. - - Build and return a :class:`~pip._internal.index.package_finder.PackageFinder` - instance using the :class:`~pip._internal.commands.install.InstallCommand` helper - method to construct the finder, shimmed with backports as needed for compatibility. - """ - if options is None: - options, _ = install_cmd.parser.parse_args([]) - session = install_cmd._build_session(options) - atexit.register(session.close) - build_kwargs = {"options": options, "session": session} - if python_version: - assert python_abi_tag is not None - target_python_builder = TargetPython - builder_args = inspect.signature(target_python_builder).parameters - target_python_params: dict[str, Any] = {"py_version_info": python_version} - if "abi" in builder_args: - target_python_params["abi"] = python_abi_tag - elif "abis" in builder_args: - target_python_params["abis"] = [python_abi_tag] - - target_python = target_python_builder(**target_python_params) - build_kwargs["target_python"] = target_python - - build_kwargs["ignore_requires_python"] = ignore_requires_python - return install_cmd._build_package_finder(**build_kwargs) # type: ignore diff --git a/pdm/models/project_info.py b/pdm/models/project_info.py index 2c4c9efd2b..6a91f6ebb4 100644 --- a/pdm/models/project_info.py +++ b/pdm/models/project_info.py @@ -1,16 +1,12 @@ from __future__ import annotations import itertools -import sys -from typing import Any, Iterator +from typing import TYPE_CHECKING, Any, Iterator -from pdm import termui from pdm.pep517.metadata import Metadata -if sys.version_info >= (3, 8): - from importlib.metadata import Distribution -else: - from importlib_metadata import Distribution +if TYPE_CHECKING: + from pdm.compat import Distribution class ProjectInfo: @@ -68,22 +64,22 @@ def __getitem__(self, key: str) -> Any: return self._parsed[key] def generate_rows(self) -> Iterator[tuple[str, str]]: - yield termui.cyan("Name:"), self._parsed["name"] - yield termui.cyan("Latest version:"), self._parsed["version"] + yield "[b cyan]Name[/]:", self._parsed["name"] + yield "[b cyan]Latest version[/]:", self._parsed["version"] if self.latest_stable_version: - yield (termui.cyan("Latest stable version:"), self.latest_stable_version) + yield ("[b cyan]Latest stable version[/]:", self.latest_stable_version) if self.installed_version: - yield (termui.green("Installed version:"), self.installed_version) - yield termui.cyan("Summary:"), self._parsed.get("summary", "") - yield termui.cyan("Requires Python:"), self._parsed["requires-python"] - yield termui.cyan("Author:"), self._parsed.get("author", "") - yield termui.cyan("Author email:"), self._parsed.get("email", "") - yield termui.cyan("License:"), self._parsed.get("license", "") - yield termui.cyan("Homepage:"), self._parsed.get("homepage", "") + yield ("[b cyan]Installed version[/]:", self.installed_version) + yield "[b cyan]Summary[/]:", self._parsed.get("summary", "") + yield "[b cyan]Requires Python[/]:", self._parsed["requires-python"] + yield "[b cyan]Author[/]:", self._parsed.get("author", "") + yield "[b cyan]Author email[/]:", self._parsed.get("email", "") + yield "[b cyan]License[/]:", self._parsed.get("license", "") + yield "[b cyan]Homepage[/]:", self._parsed.get("homepage", "") yield from itertools.zip_longest( -
(termui.cyan("Project URLs:"),), + ("[b cyan]Project URLs[/]:",), self._parsed.get("project-urls", []), fillvalue="", ) - yield termui.cyan("Platform:"), self._parsed.get("platform", "") - yield termui.cyan("Keywords:"), self._parsed.get("keywords", "") + yield "[b cyan]Platform[/]:", self._parsed.get("platform", "") + yield "[b cyan]Keywords[/]:", self._parsed.get("keywords", "") diff --git a/pdm/models/repositories.py b/pdm/models/repositories.py index fd7b342159..1565d40c46 100644 --- a/pdm/models/repositories.py +++ b/pdm/models/repositories.py @@ -5,22 +5,20 @@ from functools import lru_cache, wraps from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, TypeVar, cast -from packaging.version import parse as parse_version -from pip._vendor.html5lib import parse - from pdm import termui -from pdm._types import CandidateInfo, Package, SearchResult, Source -from pdm.exceptions import CandidateInfoNotFound, CandidateNotFound, CorruptedCacheError +from pdm.exceptions import CandidateInfoNotFound, CandidateNotFound from pdm.models.candidates import Candidate from pdm.models.requirements import ( Requirement, filter_requirements_with_extras, parse_requirement, ) -from pdm.models.specifiers import PySpecSet, get_specifier -from pdm.utils import allow_all_wheels, normalize_name, url_without_fragments +from pdm.models.search import SearchResultParser +from pdm.models.specifiers import PySpecSet +from pdm.utils import normalize_name, url_without_fragments if TYPE_CHECKING: + from pdm._types import CandidateInfo, SearchResult, Source from pdm.models.environment import Environment ALLOW_ALL_PYTHON = PySpecSet() @@ -102,11 +100,7 @@ def find_candidates( # `allow_prereleases` is None means leave it to specifier to decide whether to # include prereleases requires_python = requirement.requires_python & self.environment.python_requires - cans = sorted( - self._find_candidates(requirement), - key=lambda c: (parse_version(c.version), c.link.is_wheel), # type: ignore - reverse=True, - ) + cans = list(self._find_candidates(requirement)) applicable_cans = [ c for c in cans @@ -177,9 +171,6 @@ def print_candidates( def _get_dependencies_from_cache(self, candidate: Candidate) -> CandidateInfo: try: result = self._candidate_info_cache.get(candidate) - except CorruptedCacheError: - self._candidate_info_cache.clear() - raise CandidateInfoNotFound(candidate) except KeyError: raise CandidateInfoNotFound(candidate) return result @@ -204,22 +195,19 @@ def get_hashes(self, candidate: Candidate) -> dict[str, str] | None: return None if candidate.hashes: return candidate.hashes - req = dataclasses.replace( - candidate.req, specifier=get_specifier(f"=={candidate.version}") - ) + req = candidate.req.as_pinned_version(candidate.version) if candidate.req.is_file_or_url: matching_candidates: Iterable[Candidate] = [candidate] else: - matching_candidates = self.find_candidates(req, True) + matching_candidates = self.find_candidates(req, ignore_requires_python=True) result: dict[str, str] = {} with self.environment.get_finder(self.sources) as finder: - self._hash_cache.session = finder.session # type: ignore for c in matching_candidates: - link = c.prepare(self.environment).ireq.link + # Prepare the candidate to replace vars in the link URL + link = c.prepare(self.environment).link if not link or link.is_vcs: continue - result[link.filename] = self._hash_cache.get_hash(link) - + result[link.filename] = self._hash_cache.get_hash(link, finder.session) return result or None def dependency_generators(self) -> 
Iterable[Callable[[Candidate], CandidateInfo]]: @@ -257,7 +245,7 @@ def _get_dependencies_from_json(self, candidate: Candidate) -> CandidateInfo: if proc_url.endswith("/simple") ] with self.environment.get_finder(sources) as finder: - session = finder.session # type: ignore + session = finder.session for prefix in url_prefixes: json_url = f"{prefix}/pypi/{candidate.name}/{candidate.version}/json" resp = session.get(json_url) @@ -289,10 +277,10 @@ def dependency_generators(self) -> Iterable[Callable[[Candidate], CandidateInfo] @lru_cache() def _find_candidates(self, requirement: Requirement) -> Iterable[Candidate]: sources = self.get_filtered_sources(requirement) - with self.environment.get_finder(sources, True) as finder, allow_all_wheels(): + with self.environment.get_finder(sources, True) as finder: cans = [ Candidate.from_installation_candidate(c, requirement) - for c in finder.find_all_candidates(requirement.project_name) + for c in finder.find_all_packages(requirement.project_name) ] if not cans: raise CandidateNotFound( @@ -303,7 +291,6 @@ def _find_candidates(self, requirement: Requirement) -> Iterable[Candidate]: def search(self, query: str) -> SearchResult: pypi_simple = self.sources[0]["url"].rstrip("/") - results = [] if pypi_simple.endswith("/simple"): search_url = pypi_simple[:-6] + "search" @@ -315,34 +302,19 @@ def search(self, query: str) -> SearchResult: resp = session.get(search_url, params={"q": query}) if resp.status_code == 404: self.environment.project.core.ui.echo( - termui.yellow( - f"{pypi_simple!r} doesn't support '/search' endpoint, fallback " - f"to {self.DEFAULT_INDEX_URL!r} now.\n" - "This may take longer depending on your network condition." - ), + f"{pypi_simple!r} doesn't support '/search' endpoint, fallback " + f"to {self.DEFAULT_INDEX_URL!r} now.\n" + "This may take longer depending on your network condition.", err=True, + style="yellow", ) resp = session.get( f"{self.DEFAULT_INDEX_URL}/search", params={"q": query} ) + parser = SearchResultParser() resp.raise_for_status() - content = parse(resp.content, namespaceHTMLElements=False) - - for result in content.findall(".//*[@class='package-snippet']"): - name = result.find("h3/*[@class='package-snippet__name']").text - version = result.find("h3/*[@class='package-snippet__version']").text - - if not name or not version: - continue - - description = result.find("p[@class='package-snippet__description']").text - if not description: - description = "" - - result = Package(name, version, description) - results.append(result) - - return results + parser.feed(resp.text) + return parser.results class LockedRepository(BaseRepository): diff --git a/pdm/models/requirements.py b/pdm/models/requirements.py index b39894302a..44a5240b35 100644 --- a/pdm/models/requirements.py +++ b/pdm/models/requirements.py @@ -6,41 +6,35 @@ import os import re import secrets -import sys import urllib.parse as urlparse import warnings from pathlib import Path -from typing import Any, Sequence, Type, TypeVar, cast +from typing import TYPE_CHECKING, Any, Sequence, Type, TypeVar, cast -from pip._vendor.packaging.markers import InvalidMarker -from pip._vendor.packaging.requirements import InvalidRequirement -from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.pkg_resources import Requirement as PackageRequirement -from pip._vendor.pkg_resources import RequirementParseError, safe_name +from packaging.markers import InvalidMarker +from packaging.requirements import InvalidRequirement +from packaging.requirements import 
Requirement as PackageRequirement +from packaging.specifiers import SpecifierSet +from packaging.utils import parse_sdist_filename, parse_wheel_filename +from packaging.version import parse as parse_version +from unearth import Link -from pdm._types import RequirementDict +from pdm.compat import Distribution from pdm.exceptions import ExtrasWarning, RequirementError from pdm.models.markers import Marker, get_marker, split_marker_extras -from pdm.models.pip_shims import ( - InstallRequirement, - Link, - install_req_from_editable, - install_req_from_line, - path_to_url, - url_to_path, -) from pdm.models.setup import Setup from pdm.models.specifiers import PySpecSet, get_specifier from pdm.utils import ( add_ssh_scheme_to_git_uri, - parse_name_version_from_wheel, + normalize_name, + path_to_url, + url_to_path, url_without_fragments, ) -if sys.version_info >= (3, 8): - from importlib.metadata import Distribution -else: - from importlib_metadata import Distribution +if TYPE_CHECKING: + from pdm._types import RequirementDict + VCS_SCHEMA = ("git", "hg", "svn", "bzr") _vcs_req_re = re.compile( @@ -89,33 +83,32 @@ def __post_init__(self) -> None: @property def project_name(self) -> str | None: - return safe_name(self.name) if self.name else None # type: ignore + return normalize_name(self.name, lowercase=False) if self.name else None @property def key(self) -> str | None: return self.project_name.lower() if self.project_name else None @property - def version(self) -> str | None: + def is_pinned(self) -> bool: if not self.specifier: - return None + return False - is_pinned = len(self.specifier) == 1 and next( - iter(self.specifier) - ).operator in ( + return len(self.specifier) == 1 and next(iter(self.specifier)).operator in ( "==", "===", ) - if is_pinned: - return next(iter(self.specifier)).version - return None - - @version.setter - def version(self, v: str) -> None: - if not v or v == "*": - self.specifier = SpecifierSet() - else: - self.specifier = get_specifier(f"=={v}") + + def as_pinned_version(self: T, other_version: str | None) -> T: + """Return a new requirement with the given pinned version.""" + if self.is_pinned or not other_version: + return self + version = parse_version(other_version) + normalized = str(version) + if version.local: + # Remove the local part to accept wider range of prereleases. + normalized = normalized.rsplit("+", 1)[0] + return dataclasses.replace(self, specifier=get_specifier(f"=={normalized}")) def _hash_key(self) -> tuple: return ( @@ -211,7 +204,7 @@ def is_file_or_url(self) -> bool: def as_line(self) -> str: raise NotImplementedError - def matches(self, line: str, editable_match: bool = True) -> bool: + def matches(self, line: str) -> bool: """Return whether the passed in PEP 508 string is the same requirement as this one. 
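+ An editable line is compared after stripping its "-e " prefix; only the requirement keys are compared.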
""" @@ -219,22 +212,7 @@ def matches(self, line: str, editable_match: bool = True) -> bool: req = parse_requirement(line.split("-e ", 1)[-1], True) else: req = parse_requirement(line, False) - return self.key == req.key and ( - not editable_match or self.editable == req.editable - ) - - def as_ireq(self, **kwargs: Any) -> InstallRequirement: - line_for_req = self.as_line() - try: - if self.editable: - line_for_req = line_for_req[3:].strip() - ireq = install_req_from_editable(line_for_req, **kwargs) - else: - ireq = install_req_from_line(line_for_req, **kwargs) - except Exception as e: - raise RequirementError(e) - ireq.req = self # type: ignore - return ireq + return self.key == req.key @classmethod def from_pkg_requirement(cls, req: PackageRequirement) -> "Requirement": @@ -335,18 +313,36 @@ def is_local(self) -> bool: def is_local_dir(self) -> bool: return self.is_local and cast(Path, self.path).is_dir() + def as_file_link(self) -> Link: + url = self.get_full_url() + # only subdirectory is useful in a file link + if self.subdirectory: + url += f"#subdirectory={self.subdirectory}" + return Link(url) + + def get_full_url(self) -> str: + return url_without_fragments(self.url) + def as_line(self) -> str: project_name = f"{self.project_name}" if self.project_name else "" - extras = f"[{','.join(sorted(self.extras))}]" if self.extras else "" + extras = ( + f"[{','.join(sorted(self.extras))}]" + if self.extras and self.project_name + else "" + ) marker = self._format_marker() - url = url_without_fragments(self.url) - if self.editable or self.subdirectory: - fragments = f"egg={project_name}{extras}" - if self.subdirectory: - fragments = f"{fragments}&subdirectory={self.subdirectory}" - return f"{'-e ' if self.editable else ''}{url}#{fragments}{marker}" + url = self.get_full_url() + fragments = [] + if self.subdirectory: + fragments.append(f"subdirectory={self.subdirectory}") + if self.editable: + if project_name: + fragments.insert(0, f"egg={project_name}{extras}") + fragment_str = ("#" + "&".join(fragments)) if fragments else "" + return f"-e {url}{fragment_str}{marker}" delimiter = " @ " if project_name else "" - return f"{project_name}{extras}{delimiter}{url}{marker}" + fragment_str = ("#" + "&".join(fragments)) if fragments else "" + return f"{project_name}{extras}{delimiter}{url}{fragment_str}{marker}" def _parse_name_from_url(self) -> None: parsed = urlparse.urlparse(self.url) @@ -361,13 +357,17 @@ def _parse_name_from_url(self) -> None: urlparse.unquote(url_without_fragments(self.url)) ) if filename.endswith(".whl"): - self.name, _ = parse_name_version_from_wheel(filename) + self.name, *_ = parse_wheel_filename(filename) else: - match = _egg_info_re.match(filename) - # Filename is like `-.tar.gz`, where name will be - # extracted and version will be left to be determined from the metadata. - if match: - self.name = match.group(1) + try: + self.name, *_ = parse_sdist_filename(filename) + except ValueError: + match = _egg_info_re.match(filename) + # Filename is like `-.tar.gz`, where name will be + # extracted and version will be left to be determined from + # the metadata. 
+ if match: + self.name = match.group(1) def _check_installable(self) -> None: assert self.path @@ -394,22 +394,13 @@ def __post_init__(self) -> None: if not self.vcs: self.vcs = self.url.split("+", 1)[0] - def as_line(self) -> str: - project_name = f"{self.project_name}" if self.project_name else "" - extras = f"[{','.join(sorted(self.extras))}]" if self.extras else "" - marker = self._format_marker() - url = self.url + def get_full_url(self) -> str: + url = super().get_full_url() if self.revision and not self.editable: url += f"@{self.revision}" elif self.ref: url += f"@{self.ref}" - if self.editable or self.subdirectory: - fragments = f"egg={project_name}{extras}" - if self.subdirectory: - fragments = f"{fragments}&subdirectory={self.subdirectory}" - return f"{'-e ' if self.editable else ''}{url}#{fragments}{marker}" - delimiter = " @ " if project_name else "" - return f"{project_name}{extras}{delimiter}{url}{marker}" + return url def _parse_url(self) -> None: vcs, url_no_vcs = self.url.split("+", 1) @@ -429,18 +420,6 @@ def _parse_url(self) -> None: self.url = f"{vcs}+{repo}" self.repo, self.ref = repo, ref # type: ignore - @staticmethod - def _build_url_from_req_dict(name: str, url: str, req_dict: RequirementDict) -> str: - assert not isinstance(req_dict, str) - ref = f"@{req_dict['ref']}" if "ref" in req_dict else "" - fragments = f"#egg={urlparse.quote(name)}" - if "subdirectory" in req_dict: - fragments += ( - "&subdirectory=" # type: ignore - f"{urlparse.quote(req_dict.pop('subdirectory'))}" # type: ignore - ) - return f"{url}{ref}{fragments}" - def filter_requirements_with_extras( project_name: str, @@ -487,7 +466,7 @@ def parse_requirement(line: str, editable: bool = False) -> Requirement: else: try: package_req = PackageRequirement(line) # type: ignore - except (RequirementParseError, InvalidRequirement) as e: + except InvalidRequirement as e: m = _file_req_re.match(line) if m is None: raise RequirementError(str(e)) from None @@ -505,47 +484,3 @@ def parse_requirement(line: str, editable: bool = False) -> Requirement: " or local directory." 
) return r - - -class MockDistribution(Distribution): - def __init__(self, data: Setup) -> None: - self._data = data - - def read_text(self, filename: str) -> str | None: - return None - - def locate_file(self, path: os.PathLike[str] | str) -> os.PathLike[str]: - return Path("") - - @property - def metadata(self) -> dict[str, Any]: # type: ignore - return { - "Name": self._data.name, - "Version": self._data.version, - "Summary": "UNKNOWN", - "Requires-Python": self._data.python_requires, - } - - @property - def requires(self) -> list[str] | None: - result = self._data.install_requires - for extra, reqs in self._data.extras_require.items(): - extra_marker = f"extra == '{extra}'" - for req in reqs: - parsed = parse_requirement(req) - old_marker = str(parsed.marker) if parsed.marker else None - if old_marker: - if " or " in old_marker: - new_marker = f"({old_marker}) and {extra_marker}" - else: - new_marker = f"{old_marker} and {extra_marker}" - else: - new_marker = extra_marker - parsed.marker = Marker(new_marker) - result.append(parsed.as_line()) - return result - - -def parse_metadata_from_source(src_dir: str) -> Distribution: - setup = Setup.from_directory(Path(src_dir)) - return MockDistribution(setup) diff --git a/pdm/models/search.py b/pdm/models/search.py new file mode 100644 index 0000000000..0a9e401cd2 --- /dev/null +++ b/pdm/models/search.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import functools +from dataclasses import dataclass +from html.parser import HTMLParser +from typing import Callable + +from pdm._types import Package + + +@dataclass +class Result: + name: str = "" + version: str = "" + description: str = "" + + def as_package(self) -> Package: + return Package(self.name, self.version, self.description) + + +class SearchResultParser(HTMLParser): + """A simple HTML parser for pypi.org search results.""" + + def __init__(self) -> None: + super().__init__() + self.results: list[Package] = [] + self._current: Result | None = None + self._nest_anchors = 0 + self._data_callback: Callable[[str], None] | None = None + + @staticmethod + def _match_class(attrs: list[tuple[str, str | None]], name: str) -> bool: + attrs_map = dict(attrs) + return name in (attrs_map.get("class") or "").split() + + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: + if not self._current: + if tag == "a" and self._match_class(attrs, "package-snippet"): + self._current = Result() + self._nest_anchors = 1 + else: + if tag == "span" and self._match_class(attrs, "package-snippet__name"): + self._data_callback = functools.partial(setattr, self._current, "name") + elif tag == "span" and self._match_class(attrs, "package-snippet__version"): + self._data_callback = functools.partial( + setattr, self._current, "version" + ) + elif tag == "p" and self._match_class( + attrs, "package-snippet__description" + ): + self._data_callback = functools.partial( + setattr, self._current, "description" + ) + elif tag == "a": + self._nest_anchors += 1 + + def handle_data(self, data: str) -> None: + if self._data_callback is not None: + self._data_callback(data) + self._data_callback = None + + def handle_endtag(self, tag: str) -> None: + if tag != "a" or self._current is None: + return + self._nest_anchors -= 1 + if self._nest_anchors == 0: + if self._current.name and self._current.version: + self.results.append(self._current.as_package()) + self._current = None diff --git a/pdm/models/session.py b/pdm/models/session.py new file mode 100644 index 0000000000..55aafdca85 --- 
/dev/null +++ b/pdm/models/session.py @@ -0,0 +1,33 @@ +import functools +from pathlib import Path +from typing import Any + +from cachecontrol.adapter import CacheControlAdapter +from requests_toolbelt.utils import user_agent +from unearth.session import InsecureMixin, PyPISession + +from pdm.__version__ import __version__ + + +class InsecureCacheControlAdapter(InsecureMixin, CacheControlAdapter): + pass + + +class PDMSession(PyPISession): + def __init__(self, *, cache_dir: Path, **kwargs: Any) -> None: + from pdm.models.caches import SafeFileCache + + cache = SafeFileCache(str(cache_dir)) + self.secure_adapter_cls = functools.partial(CacheControlAdapter, cache=cache) + self.insecure_adapter_cls = functools.partial( + InsecureCacheControlAdapter, cache=cache + ) + super().__init__(**kwargs) + self.headers["User-Agent"] = self._make_user_agent() + + def _make_user_agent(self) -> str: + return ( + user_agent.UserAgentBuilder("pdm", __version__) + .include_implementation() + .build() + ) diff --git a/pdm/models/setup.py b/pdm/models/setup.py index 7aa7905d7d..1f1bda6cb3 100644 --- a/pdm/models/setup.py +++ b/pdm/models/setup.py @@ -66,7 +66,7 @@ def read_pyproject_toml(file: Path) -> Setup: from pdm.project.metadata import MutableMetadata try: - metadata = MutableMetadata(file) + metadata = MutableMetadata.from_file(file) except ValueError: return Setup() return Setup( diff --git a/pdm/models/specifiers.py b/pdm/models/specifiers.py index 620b672ac0..f16eb6ba43 100644 --- a/pdm/models/specifiers.py +++ b/pdm/models/specifiers.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Any, Iterable, List, Set, Tuple, Union, cast -from pip._vendor.packaging.specifiers import SpecifierSet +from packaging.specifiers import SpecifierSet from pdm.exceptions import InvalidPyVersion from pdm.models.versions import Version diff --git a/pdm/models/versions.py b/pdm/models/versions.py index cdc91b2294..db40e8fe73 100644 --- a/pdm/models/versions.py +++ b/pdm/models/versions.py @@ -1,7 +1,7 @@ import re from typing import Any, List, Optional, Tuple, Union, overload -from pdm._types import Literal +from pdm.compat import Literal from pdm.exceptions import InvalidPyVersion VersionBit = Union[int, Literal["*"]] diff --git a/pdm/models/working_set.py b/pdm/models/working_set.py index e8b929e76a..04afbac7d6 100644 --- a/pdm/models/working_set.py +++ b/pdm/models/working_set.py @@ -5,14 +5,9 @@ from pathlib import Path from typing import Iterable, Iterator, Mapping +from pdm.compat import importlib_metadata as im from pdm.utils import normalize_name -if sys.version_info >= (3, 8): - import importlib.metadata as im -else: - import importlib_metadata as im - - default_context = im.DistributionFinder.Context() diff --git a/pdm/project/config.py b/pdm/project/config.py index ee40aeebba..3185e93076 100644 --- a/pdm/project/config.py +++ b/pdm/project/config.py @@ -1,29 +1,54 @@ +from __future__ import annotations + +import collections import dataclasses import os from pathlib import Path -from typing import Any, Callable, Dict, Iterator, MutableMapping, Optional, Set, TypeVar +from typing import Any, Callable, Iterator, Mapping, MutableMapping, TypeVar -import click import platformdirs import tomlkit from pdm import termui -from pdm.exceptions import NoConfigError -from pdm.utils import get_pypi_source +from pdm.exceptions import NoConfigError, PdmUsageError T = TypeVar("T") +ui = termui.UI() + +REPOSITORY = "repository" + + +@dataclasses.dataclass +class RepositoryConfig: + url: str + username: str | None = 
None + password: str | None = None + def __rich__(self) -> str: + lines = [f"[cyan]url[/] = {self.url}"] + if self.username: + lines.append(f"[cyan]username[/] = {self.username}") + if self.password: + lines.append("[cyan]password[/] = <hidden>") + return "\n".join(lines) -def load_config(file_path: Path) -> Dict[str, Any]: + +DEFAULT_REPOSITORIES = { + "pypi": RepositoryConfig("https://upload.pypi.org/legacy/"), + "testpypi": RepositoryConfig("https://test.pypi.org/legacy/"), +} + + +def load_config(file_path: Path) -> dict[str, Any]: """Load a nested TOML document into key-value pairs E.g. ["python"]["path"] will be loaded as "python.path" key. """ - def get_item(sub_data: Dict[str, Any]) -> Dict[str, Any]: + def get_item(sub_data: Mapping[str, Any]) -> Mapping[str, Any]: result = {} for k, v in sub_data.items(): - if getattr(v, "items", None) is not None: + if k != REPOSITORY and isinstance(v, Mapping): result.update( {f"{k}.{sub_k}": sub_v for sub_k, sub_v in get_item(v).items()} ) @@ -44,6 +69,9 @@ def ensure_boolean(val: Any) -> bool: return bool(val) and val.lower() not in ("false", "no", "0") +DEFAULT_PYPI_INDEX = "https://pypi.org/simple" + + @dataclasses.dataclass class ConfigItem: """An item of configuration, with following attributes: @@ -62,9 +90,9 @@ class ConfigItem: description: str default: Any = _NOT_SET global_only: bool = False - env_var: Optional[str] = None + env_var: str | None = None coerce: Callable = str - replace: Optional[str] = None + replace: str | None = None def should_show(self) -> bool: return self.default is not self._NOT_SET @@ -73,8 +101,7 @@ def should_show(self) -> bool: class Config(MutableMapping[str, str]): """A dict-like object for configuration key and values""" - pypi_url, verify_ssl = get_pypi_source() - _config_map: Dict[str, ConfigItem] = { + _config_map: dict[str, ConfigItem] = { "cache_dir": ConfigItem( "The root directory of cached files", platformdirs.user_cache_dir("pdm"), @@ -84,6 +111,7 @@ class Config(MutableMapping[str, str]): "Check if there is any newer version available", True, True, + env_var="PDM_CHECK_UPDATE", coerce=ensure_boolean, ), "build_isolation": ConfigItem( @@ -102,7 +130,7 @@ class Config(MutableMapping[str, str]): ), "global_project.path": ConfigItem( "The path to the global project", - os.path.expanduser("~/.pdm/global-project"), + platformdirs.user_config_path("pdm") / "global-project", True, ), "global_project.user_site": ConfigItem( @@ -158,11 +186,11 @@ class Config(MutableMapping[str, str]): ), "pypi.url": ConfigItem( "The URL of PyPI mirror, defaults to https://pypi.org/simple", - pypi_url, + DEFAULT_PYPI_INDEX, env_var="PDM_PYPI_URL", ), "pypi.verify_ssl": ConfigItem( - "Verify SSL certificate when query PyPI", verify_ssl, coerce=ensure_boolean + "Verify SSL certificate when query PyPI", True, coerce=ensure_boolean ), "pypi.json_api": ConfigItem( "Consult PyPI's JSON API for package metadata", @@ -171,10 +199,9 @@ class Config(MutableMapping[str, str]): coerce=ensure_boolean, ), } - del pypi_url, verify_ssl @classmethod - def get_defaults(cls) -> Dict[str, Any]: + def get_defaults(cls) -> dict[str, Any]: return {k: v.default for k, v in cls._config_map.items() if v.should_show()} @classmethod @@ -183,22 +210,20 @@ def add_config(cls, name: str, item: ConfigItem) -> None: cls._config_map[name] = item def __init__(self, config_file: Path, is_global: bool = False): - self._data = {} - if is_global: - self._data.update(self.get_defaults()) - self.is_global = is_global self.config_file = config_file.resolve()
self._file_data = load_config(self.config_file) self.deprecated = { v.replace: k for k, v in self._config_map.items() if v.replace } - self._data.update(self._file_data) + self._data = collections.ChainMap( + self._file_data, self.get_defaults() if is_global else {} + ) def _save_config(self) -> None: """Save the changes to the config file.""" self.config_file.parent.mkdir(parents=True, exist_ok=True) - toml_data: Dict[str, Any] = {} + toml_data: dict[str, Any] = {} for key, value in self._file_data.items(): *parts, last = key.split(".") temp = toml_data @@ -212,6 +237,19 @@ def _save_config(self) -> None: tomlkit.dump(toml_data, fp) # type: ignore def __getitem__(self, key: str) -> Any: + parts = key.split(".") + if parts[0] == REPOSITORY: + if len(parts) < 2: + raise PdmUsageError("Must specify a repository name") + repo = self.get_repository_config(parts[1]) + if repo is None: + raise KeyError(f"No repository named {parts[1]}") + + value = getattr(repo, parts[2]) if len(parts) >= 3 else repo + if len(parts) >= 3 and parts[2] == "password" and value: + return "<hidden>" + return value + if key not in self._config_map and key not in self.deprecated: raise NoConfigError(key) config_key = self.deprecated.get(key, key) @@ -229,6 +267,17 @@ def __getitem__(self, key: str) -> Any: return config.coerce(result) def __setitem__(self, key: str, value: Any) -> None: + parts = key.split(".") + if parts[0] == REPOSITORY: + if len(parts) < 3: + raise PdmUsageError( + "Set repository config with [green]repository.{name}.{attr}" + ) + self._file_data.setdefault(parts[0], {}).setdefault( + parts[1], {} + )[parts[2]] = value + self._save_config() + return if key not in self._config_map and key not in self.deprecated: raise NoConfigError(key) config_key = self.deprecated.get(key, key) @@ -241,16 +290,13 @@ def __setitem__(self, key: str, value: Any) -> None: value = config.coerce(value) env_var = config.env_var if env_var is not None and env_var in os.environ: - click.echo( - termui.yellow( 
- "WARNING: the config is shadowed by env var '{}', " - "set value won't take effect.".format(env_var) - ) + ui.echo( + "WARNING: the config is shadowed by env var '{}', " + "set value won't take effect.".format(env_var), + style="yellow", ) self._save_config() + + def get_repository_config(self, name_or_url: str) -> RepositoryConfig | None: + """Get a repository by name or url.""" + if not self.is_global: # pragma: no cover + raise PdmUsageError("No repository config in project config.") + repositories: Mapping[str, Mapping[str, str | None]] = self._data.get( + REPOSITORY, {} + ) + repo: RepositoryConfig | None = None + if "://" in name_or_url: + config: Mapping[str, str | None] = next( + (v for v in repositories.values() if v.get("url") == name_or_url), {} + ) + repo = next( + (r for r in DEFAULT_REPOSITORIES.values() if r.url == name_or_url), + RepositoryConfig(name_or_url), + ) + else: + config = repositories.get(name_or_url, {}) + if name_or_url in DEFAULT_REPOSITORIES: + repo = DEFAULT_REPOSITORIES[name_or_url] + if repo: + return dataclasses.replace(repo, **config) + if not config: + return None + return RepositoryConfig(**config) # type: ignore diff --git a/pdm/project/core.py b/pdm/project/core.py index 36022daf39..9204788f3e 100644 --- a/pdm/project/core.py +++ b/pdm/project/core.py @@ -10,18 +10,18 @@ from typing import TYPE_CHECKING, Any, Iterable, Type, cast from urllib.parse import urlparse +import platformdirs import tomlkit from findpython import Finder from pdm import termui from pdm._types import Source from pdm.exceptions import NoPythonVersion, PdmUsageError, ProjectError -from pdm.models import pip_shims -from pdm.models.caches import CandidateInfoCache, HashCache +from pdm.models.caches import CandidateInfoCache, HashCache, WheelCache from pdm.models.candidates import Candidate from pdm.models.environment import Environment, GlobalEnvironment from pdm.models.python import PythonInfo -from pdm.models.repositories import BaseRepository, LockedRepository, PyPIRepository +from pdm.models.repositories import BaseRepository, LockedRepository from pdm.models.requirements import Requirement, parse_requirement, strip_extras from pdm.models.specifiers import PySpecSet, get_specifier from pdm.project.config import Config @@ -37,12 +37,13 @@ get_in_project_venv_python, get_venv_like_prefix, normalize_name, + path_to_url, ) if TYPE_CHECKING: from resolvelib.reporters import BaseReporter + from rich.status import Status - from pdm._vendor import halo from pdm.core import Core from pdm.resolver.providers import BaseProvider @@ -78,7 +79,7 @@ def __init__( self.core = core if global_config is None: - global_config = Path.home() / ".pdm/config.toml" + global_config = platformdirs.user_config_path("pdm") / "config.toml" self.global_config = Config(Path(global_config), is_global=True) global_project = Path(self.global_config["global_project.path"]) @@ -95,7 +96,7 @@ def __init__( ): self.core.ui.echo( "Project is not found, fallback to the global project", - fg="yellow", + style="yellow", err=True, ) root_path = global_project @@ -266,9 +267,11 @@ def python_requires(self) -> PySpecSet: def get_dependencies(self, group: str | None = None) -> dict[str, Requirement]: metadata = self.meta + group = group or "default" optional_dependencies = metadata.get("optional-dependencies", {}) dev_dependencies = self.tool_settings.get("dev-dependencies", {}) - if group in (None, "default"): + in_metadata = group == "default" or group in optional_dependencies + if group == "default": deps = 
metadata.get("dependencies", []) else: if group in optional_dependencies and group in dev_dependencies: @@ -276,7 +279,7 @@ def get_dependencies(self, group: str | None = None) -> dict[str, Requirement]: f"The {group} group exists in both [optional-dependencies] " "and [dev-dependencies], the former is taken.", err=True, - fg="yellow", + style="yellow", ) if group in optional_dependencies: deps = optional_dependencies[group] @@ -288,6 +291,15 @@ def get_dependencies(self, group: str | None = None) -> dict[str, Requirement]: with cd(self.root): for line in deps: if line.startswith("-e "): + if in_metadata: + self.core.ui.echo( + f"WARNING: Skipping editable dependency [b]{line}[/] in the" + r" [green]\[project][/] table. Please move it to the " + r"[green]\[tool.pdm.dev-dependencies][/] table", + err=True, + style="yellow", + ) + continue req = parse_requirement(line[3:].strip(), True) else: req = parse_requirement(line) @@ -332,18 +344,23 @@ def all_dependencies(self) -> dict[str, dict[str, Requirement]]: def allow_prereleases(self) -> bool | None: return self.tool_settings.get("allow_prereleases") + @property + def default_source(self) -> Source: + """Get the default source from the pypi setting""" + return cast( + "Source", + { + "url": self.config["pypi.url"], + "verify_ssl": self.config["pypi.verify_ssl"], + "name": "pypi", + }, + ) + @property def sources(self) -> list[Source]: sources = list(self.tool_settings.get("source", [])) if all(source.get("name") != "pypi" for source in sources): - sources.insert( - 0, - { - "url": self.config["pypi.url"], - "verify_ssl": self.config["pypi.verify_ssl"], - "name": "pypi", - }, - ) + sources.insert(0, self.default_source) expanded_sources: list[Source] = [ Source( url=expand_env_vars_in_auth(s["url"]), @@ -358,7 +375,7 @@ def sources(self) -> list[Source]: def get_repository(self, cls: Type[BaseRepository] | None = None) -> BaseRepository: """Get the repository object""" if cls is None: - cls = PyPIRepository + cls = self.core.repository_class sources = self.sources or [] return cls(sources, self.environment) @@ -393,7 +410,7 @@ def get_provider( ReusePinProvider, ) - repository = self.get_repository(cls=self.core.repository_class) + repository = self.get_repository() allow_prereleases = self.allow_prereleases overrides = { normalize_name(k): v @@ -402,7 +419,7 @@ def get_provider( if strategy != "all" and not self.is_lockfile_compatible(): self.core.ui.echo( "Updating the whole lock file as it is not compatible with PDM", - fg="yellow", + style="yellow", err=True, ) strategy = "all" @@ -428,7 +445,7 @@ def get_reporter( self, requirements: list[Requirement], tracked_names: Iterable[str] | None = None, - spinner: halo.Halo | termui.DummySpinner | None = None, + spinner: Status | termui.DummySpinner | None = None, ) -> BaseReporter: """Return the reporter object to construct a resolver. 
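The hunk above makes the repository class pluggable: `get_repository()` now resolves `self.core.repository_class` instead of hard-coding `PyPIRepository`. A minimal sketch of how a plugin could take advantage of this, assuming the usual pdm plugin convention of a callable that receives the `Core` object; `MirrorOnlyRepository` is a hypothetical name used only for illustration:

```python
# Sketch of a pdm plugin swapping the repository class that
# project.get_repository() now looks up on the core object.
# MirrorOnlyRepository is a hypothetical subclass for illustration.
from pdm.core import Core
from pdm.models.repositories import PyPIRepository


class MirrorOnlyRepository(PyPIRepository):
    """Illustrative subclass; it could, e.g., filter or reorder sources."""


def plugin(core: Core) -> None:
    # get_repository() falls back to core.repository_class when no
    # explicit class is passed, so this swap affects all projects.
    core.repository_class = MirrorOnlyRepository
```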
@@ -455,13 +472,13 @@ def write_lockfile( with atomic_open_for_write(self.lockfile_file) as fp: tomlkit.dump(toml_data, fp) # type: ignore if show_message: - self.core.ui.echo(f"Changes are written to {termui.green('pdm.lock')}.") + self.core.ui.echo("Changes are written to [green]pdm.lock[/].") self._lockfile = None else: self._lockfile = toml_data def make_self_candidate(self, editable: bool = True) -> Candidate: - req = parse_requirement(pip_shims.path_to_url(self.root.as_posix()), editable) + req = parse_requirement(path_to_url(self.root.as_posix()), editable) req.name = self.meta.name return Candidate(req, name=self.meta.name, version=self.meta.version) @@ -526,14 +543,13 @@ def add_dependencies( to_group: str = "default", dev: bool = False, show_message: bool = True, - replace_editable: bool = False, ) -> None: deps = self.get_pyproject_dependencies(to_group, dev).multiline( # type: ignore True ) for _, dep in requirements.items(): matched_index = next( - (i for i, r in enumerate(deps) if dep.matches(r, not replace_editable)), + (i for i, r in enumerate(deps) if dep.matches(r)), None, ) if matched_index is None: @@ -549,19 +565,14 @@ def write_pyproject(self, show_message: bool = True) -> None: ) as f: tomlkit.dump(self.pyproject, f) # type: ignore if show_message: - self.core.ui.echo( - f"Changes are written to {termui.green('pyproject.toml')}." - ) + self.core.ui.echo("Changes are written to [green]pyproject.toml[/].") self._pyproject = None @property def meta(self) -> Metadata: if not self.pyproject: self.pyproject = {"project": tomlkit.table()} - m = Metadata(self.pyproject_file, False) - m._metadata = self.pyproject.get("project", {}) - m._tool_settings = self.tool_settings - return m + return Metadata(self.root, self.pyproject) def init_global_project(self) -> None: if not self.is_global: @@ -585,13 +596,10 @@ def cache(self, name: str) -> Path: path.mkdir(parents=True, exist_ok=True) return path - def make_wheel_cache(self) -> pip_shims.WheelCache: - return pip_shims.WheelCache( - self.cache_dir.as_posix(), pip_shims.FormatControl(set(), set()) - ) + def make_wheel_cache(self) -> WheelCache: + return WheelCache(self.cache("wheels")) def make_candidate_info_cache(self) -> CandidateInfoCache: - python_hash = hashlib.sha1( str(self.environment.python_requires).encode() ).hexdigest() @@ -599,7 +607,7 @@ def make_candidate_info_cache(self) -> CandidateInfoCache: return CandidateInfoCache(self.cache("metadata") / file_name) def make_hash_cache(self) -> HashCache: - return HashCache(directory=self.cache("hashes").as_posix()) + return HashCache(directory=self.cache("hashes")) def find_interpreters(self, python_spec: str | None = None) -> Iterable[PythonInfo]: """Return an iterable of interpreter paths that matches the given specifier, diff --git a/pdm/project/metadata.py b/pdm/project/metadata.py index ba42832f7c..f50fef4499 100644 --- a/pdm/project/metadata.py +++ b/pdm/project/metadata.py @@ -1,7 +1,15 @@ +from __future__ import annotations + +import os from collections.abc import MutableMapping -from typing import Dict, Iterator, List, TypeVar, Union +from pathlib import Path +from typing import Any, Iterator, TypeVar +from pdm.compat import Distribution, tomllib from pdm.formats import flit, poetry +from pdm.models.markers import Marker +from pdm.models.requirements import parse_requirement +from pdm.models.setup import Setup from pdm.pep517.metadata import Metadata T = TypeVar("T") @@ -13,34 +21,79 @@ class MutableMetadata(Metadata, MutableMapping): to the underlying toml parsed 
dict. """ - def _read_pyproject(self) -> None: + def __init__(self, root: str | Path, pyproject: dict[str, Any]) -> None: try: - return super()._read_pyproject() - except ValueError: - for converter in (poetry, flit): - if converter.check_fingerprint(None, self.filepath): # type: ignore - data, settings = converter.convert( # type: ignore - None, self.filepath, None + super().__init__(root, pyproject) + except ValueError as e: + for converter in (flit, poetry): + filename = os.path.join(root, "pyproject.toml") + if converter.check_fingerprint(None, filename): + data, settings = converter.convert(None, filename, None) + pyproject.setdefault("project", {}).update(data) + pyproject.setdefault("tool", {}).setdefault("pdm", {}).update( + settings ) - self._metadata = dict(data) - self._tool_settings = settings - return - raise + return super().__init__(root, pyproject) + raise e from None + + @classmethod + def from_file(cls, filename: str | Path) -> "MutableMetadata": + """Get the metadata from a pyproject.toml file""" + return cls(os.path.dirname(filename), tomllib.load(open(filename, "rb"))) - def __getitem__(self, k: str) -> Union[Dict, List[str], str]: - return self._metadata[k] + def __getitem__(self, k: str) -> dict | list[str] | str: + return self.data[k] - def __setitem__(self, k: str, v: Union[Dict, List[str], str]) -> None: - self._metadata[k] = v + def __setitem__(self, k: str, v: dict | list[str] | str) -> None: + self.data[k] = v def __delitem__(self, k: str) -> None: - del self._metadata[k] + del self.data[k] def __iter__(self) -> Iterator: - return iter(self._metadata) + return iter(self.data) def __len__(self) -> int: - return len(self._metadata) + return len(self.data) def setdefault(self, key: str, default: T) -> T: # type: ignore - return self._metadata.setdefault(key, default) + return self.data.setdefault(key, default) + + +class SetupDistribution(Distribution): + def __init__(self, data: Setup) -> None: + self._data = data + + def read_text(self, filename: str) -> str | None: + return None + + def locate_file(self, path: os.PathLike[str] | str) -> os.PathLike[str]: + return Path("") + + @property + def metadata(self) -> dict[str, Any]: # type: ignore + return { + "Name": self._data.name, + "Version": self._data.version, + "Summary": "UNKNOWN", + "Requires-Python": self._data.python_requires, + } + + @property + def requires(self) -> list[str] | None: + result = self._data.install_requires + for extra, reqs in self._data.extras_require.items(): + extra_marker = f"extra == '{extra}'" + for req in reqs: + parsed = parse_requirement(req) + old_marker = str(parsed.marker) if parsed.marker else None + if old_marker: + if " or " in old_marker: + new_marker = f"({old_marker}) and {extra_marker}" + else: + new_marker = f"{old_marker} and {extra_marker}" + else: + new_marker = extra_marker + parsed.marker = Marker(new_marker) + result.append(parsed.as_line()) + return result diff --git a/pdm/resolver/core.py b/pdm/resolver/core.py index 21ea85da1f..4a6993c229 100644 --- a/pdm/resolver/core.py +++ b/pdm/resolver/core.py @@ -42,6 +42,4 @@ def resolve( mapping[new_key] = mapping.pop(key) key = new_key - candidate.hashes = provider.repository.get_hashes(candidate) - return mapping, provider.fetched_dependencies diff --git a/pdm/resolver/providers.py b/pdm/resolver/providers.py index f6eb8dc4f1..be4f65d339 100644 --- a/pdm/resolver/providers.py +++ b/pdm/resolver/providers.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import 
TYPE_CHECKING, Callable, cast from packaging.specifiers import InvalidSpecifier, SpecifierSet from resolvelib import AbstractProvider @@ -65,7 +65,7 @@ def get_preference( candidates: dict[str, Iterator[Candidate]], information: dict[str, Iterator[RequirementInformation]], backtrack_causes: Sequence[RequirementInformation], - ) -> Comparable: + ) -> tuple[Comparable, ...]: is_top = any(parent is None for _, parent in information[identifier]) is_backtrack_cause = any( requirement.identify() == identifier @@ -122,7 +122,7 @@ def get_override_candidates(self, identifier: str) -> Iterable[Candidate]: def _find_candidates(self, requirement: Requirement) -> Iterable[Candidate]: if not requirement.is_named: can = Candidate(requirement) - can.prepare(self.repository.environment).prepare_metadata() + can.prepare(self.repository.environment).metadata return [can] else: return self.repository.find_candidates( @@ -134,20 +134,24 @@ def find_matches( identifier: str, requirements: Mapping[str, Iterator[Requirement]], incompatibilities: Mapping[str, Iterator[Candidate]], - ) -> Iterable[Candidate]: - incompat = list(incompatibilities[identifier]) - if identifier == "python": - candidates = find_python_matches(identifier, requirements) - return [c for c in candidates if c not in incompat] - elif identifier in self.overrides: - return self.get_override_candidates(identifier) - reqs = sorted(requirements[identifier], key=self.requirement_preference) - candidates = self._find_candidates(reqs[0]) - return [ - can - for can in candidates - if can not in incompat and all(self.is_satisfied_by(r, can) for r in reqs) - ] + ) -> Callable[[], Iterator[Candidate]]: + def matches_gen() -> Iterator[Candidate]: + incompat = list(incompatibilities[identifier]) + if identifier == "python": + candidates = find_python_matches(identifier, requirements) + return (c for c in candidates if c not in incompat) + elif identifier in self.overrides: + return iter(self.get_override_candidates(identifier)) + reqs = sorted(requirements[identifier], key=self.requirement_preference) + candidates = self._find_candidates(reqs[0]) + return ( + can + for can in candidates + if can not in incompat + and all(self.is_satisfied_by(r, can) for r in reqs) + ) + + return matches_gen def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: if isinstance(requirement, PythonRequirement): @@ -156,15 +160,19 @@ def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> boo return True if not requirement.is_named: return not candidate.req.is_named and url_without_fragments( - candidate.req.url - ) == url_without_fragments(requirement.url) - version = candidate.version or candidate.metadata.version + candidate.req.url # type: ignore + ) == url_without_fragments( + requirement.url # type: ignore + ) + version = candidate.version # Allow prereleases if: 1) it is not specified in the tool settings or # 2) the candidate doesn't come from PyPI index. 
allow_prereleases = ( self.allow_prereleases in (True, None) or not candidate.req.is_named ) - return requirement.specifier.contains(version, allow_prereleases) + return cast(SpecifierSet, requirement.specifier).contains( + version, allow_prereleases + ) def get_dependencies(self, candidate: Candidate) -> list[Requirement]: if isinstance(candidate, PythonCandidate): @@ -222,20 +230,28 @@ def find_matches( identifier: str, requirements: Mapping[str, Iterator[Requirement]], incompatibilities: Mapping[str, Iterator[Candidate]], - ) -> Iterable[Candidate]: + ) -> Callable[[], Iterator[Candidate]]: + super_find = super().find_matches(identifier, requirements, incompatibilities) bare_name = strip_extras(identifier)[0] - if bare_name not in self.tracked_names and identifier in self.preferred_pins: - pin = self.preferred_pins[identifier] - incompat = list(incompatibilities[identifier]) - demanded_req = next(requirements[identifier], None) - if demanded_req and demanded_req.is_named: - pin.req = demanded_req - pin._preferred = True - if pin not in incompat and all( - self.is_satisfied_by(r, pin) for r in requirements[identifier] + + def matches_gen() -> Iterator[Candidate]: + if ( + bare_name not in self.tracked_names + and identifier in self.preferred_pins ): - yield pin - yield from super().find_matches(identifier, requirements, incompatibilities) + pin = self.preferred_pins[identifier] + incompat = list(incompatibilities[identifier]) + demanded_req = next(requirements[identifier], None) + if demanded_req and demanded_req.is_named: + pin.req = demanded_req + pin._preferred = True # type: ignore + if pin not in incompat and all( + self.is_satisfied_by(r, pin) for r in requirements[identifier] + ): + yield pin + yield from super_find() + + return matches_gen class EagerUpdateProvider(ReusePinProvider): @@ -276,7 +292,7 @@ def get_preference( candidates: dict[str, Iterator[Candidate]], information: dict[str, Iterator[RequirementInformation]], backtrack_causes: Sequence[RequirementInformation], - ) -> Comparable: + ) -> tuple[Comparable, ...]: # Resolve tracking packages so we have a chance to unpin them first. 
(python, *others) = super().get_preference( identifier, resolutions, candidates, information, backtrack_causes diff --git a/pdm/resolver/python.py b/pdm/resolver/python.py index 938d24366e..48aa0742fa 100644 --- a/pdm/resolver/python.py +++ b/pdm/resolver/python.py @@ -4,7 +4,6 @@ from typing import Iterable, Iterator, Mapping, cast -from pdm import termui from pdm.models.candidates import Candidate from pdm.models.requirements import NamedRequirement, Requirement from pdm.models.specifiers import PySpecSet @@ -12,10 +11,7 @@ class PythonCandidate(Candidate): def format(self) -> str: - return ( - f"{termui.green(self.name, bold=True)} " - f"{termui.yellow(str(self.req.specifier))}" - ) + return f"[bold green]{self.name}[/]" f"[yellow]{str(self.req.specifier)}[/]" class PythonRequirement(NamedRequirement): diff --git a/pdm/resolver/reporters.py b/pdm/resolver/reporters.py index 64ddca178c..e509f44944 100644 --- a/pdm/resolver/reporters.py +++ b/pdm/resolver/reporters.py @@ -9,8 +9,8 @@ if TYPE_CHECKING: from resolvelib.resolvers import RequirementInformation, State # type: ignore + from rich.status import Status - from pdm._vendor import halo from pdm.models.candidates import Candidate from pdm.models.requirements import Requirement @@ -24,7 +24,7 @@ def log_title(title: str) -> None: class SpinnerReporter(BaseReporter): def __init__( - self, spinner: halo.Halo | termui.DummySpinner, requirements: List[Requirement] + self, spinner: Status | termui.DummySpinner, requirements: List[Requirement] ) -> None: self.spinner = spinner self.requirements = requirements @@ -81,7 +81,7 @@ def backtracking(self, candidate: Candidate) -> None: def pinning(self, candidate: Candidate) -> None: """Called when adding a candidate to the potential solution.""" - self.spinner.text = f"Resolving: new pin {candidate.format()}" + self.spinner.update(f"Resolving: new pin {candidate.format()}") logger.info("Pinning: %s %s", candidate.name, candidate.version) def resolving_conflicts(self, causes: list[RequirementInformation]) -> None: diff --git a/pdm/signals.py b/pdm/signals.py index ddfa3a53f0..bff3a6ffd1 100644 --- a/pdm/signals.py +++ b/pdm/signals.py @@ -71,3 +71,54 @@ def on_post_install(project, candidates, dry_run): artifacts (Sequence[str]): The locations of built artifacts config_settings (dict[str, str]|None): Additional config settings passed via args """ +pre_publish: NamedSignal = pdm_signals.signal("pre_publish") +"""Called before a project is published. + +Args: + project (Project): The project object +""" +post_publish: NamedSignal = pdm_signals.signal("post_publish") +"""Called after a project is published. + +Args: + project (Project): The project object +""" +pre_run: NamedSignal = pdm_signals.signal("pre_run") +"""Called before any run. + +Args: + project (Project): The project object + script (str): the script name + args (Sequence[str]): the command line provided arguments +""" +post_run: NamedSignal = pdm_signals.signal("post_run") +"""Called after any run. + +Args: + project (Project): The project object + script (str): the script name + args (Sequence[str]): the command line provided arguments +""" +pre_script: NamedSignal = pdm_signals.signal("pre_script") +"""Called before any script. + +Args: + project (Project): The project object + script (str): the script name + args (Sequence[str]): the command line provided arguments +""" +post_script: NamedSignal = pdm_signals.signal("post_script") +"""Called after any script. 
+ +Args: + project (Project): The project object + script (str): the script name + args (Sequence[str]): the command line provided arguments +""" +post_use: NamedSignal = pdm_signals.signal("post_use") +"""Called after `pdm use` switches to a new Python version. + +Args: + project (Project): The project object + python (PythonInfo): Information about the new Python interpreter +""" diff --git a/pdm/termui.py b/pdm/termui.py index cf8bfb5c96..06ccc04b61 100644 --- a/pdm/termui.py +++ b/pdm/termui.py @@ -2,90 +2,131 @@ import atexit import contextlib -import functools -import io +import enum import logging import os -import sys -from itertools import zip_longest from tempfile import mktemp -from typing import Any, Callable, Iterator, List, Optional, Sequence, Union +from typing import Any, Iterator, Sequence, Type -import click -from click._compat import strip_ansi +from rich.box import ROUNDED +from rich.console import Console +from rich.logging import RichHandler +from rich.progress import Progress, ProgressColumn +from rich.prompt import Confirm, IntPrompt, Prompt +from rich.table import Table -from pdm._vendor import colorama, halo -from pdm._vendor.log_symbols.symbols import is_supported as supports_unicode +from pdm._types import Spinner, SpinnerT logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) logger.addHandler(logging.NullHandler()) +unearth_logger = logging.getLogger("unearth") +unearth_logger.setLevel(logging.DEBUG) -def ljust(text: str, length: int) -> str: - """Like str.ljust() but ignore all ANSI controlling characters.""" - return text + " " * (length - len(strip_ansi(text))) +_console = Console(highlight=False) +_err_console = Console(stderr=True) -def rjust(text: str, length: int) -> str: - """Like str.rjust() but ignore all ANSI controlling characters.""" - return " " * (length - len(strip_ansi(text))) + text +def is_interactive(console: Console | None = None) -> bool: + """Check if the terminal is run under interactive mode""" + if console is None: + console = _console + return console.is_interactive -def centerize(text: str, length: int) -> str: - """Centerize the text while ignoring ANSI controlling characters.""" - space_num = length - len(strip_ansi(text)) - left_space = space_num // 2 - return " " * left_space + text + " " * (space_num - left_space) +def is_legacy_windows(console: Console | None = None) -> bool: + """Legacy Windows renderer may have problems rendering emojis""" + if console is None: + console = _console + return console.legacy_windows -def supports_ansi() -> bool: - if os.getenv("CI") or not hasattr(sys.stdout, "fileno"): - return False - if sys.platform == "win32": - return ( - os.getenv("ANSICON") is not None - or os.getenv("WT_SESSION") is not None - or "ON" == os.getenv("ConEmuANSI") - or "xterm" == os.getenv("Term") - ) +def style( + text: str, + *args: str, + style: str | None = None, + **kwargs: Any, +) -> str: + """Return text with ANSI codes using rich console - try: - return os.isatty(sys.stdout.fileno()) - except io.UnsupportedOperation: - return False + :param text: message with rich markup. 
+ :param style: rich style to apply to the whole string + :return: string containing ansi codes + """ + with _console.capture() as capture: + _console.print(text, *args, end="", style=style, **kwargs) + return capture.get() -# Export some style shortcut helpers -green = functools.partial(click.style, fg="green") -red = functools.partial(click.style, fg="red") -yellow = functools.partial(click.style, fg="yellow") -cyan = functools.partial(click.style, fg="cyan") -blue = functools.partial(click.style, fg="blue") -bold = functools.partial(click.style, bold=True) -# Verbosity levels -NORMAL = 0 -DETAIL = 1 -DEBUG = 2 +def confirm(*args: str, **kwargs: Any) -> bool: + return Confirm.ask(*args, **kwargs) -class DummySpinner: - """A dummy spinner class implementing needed interfaces. - But only display text onto screen. +def ask( + *args: str, prompt_type: Type[str] | Type[int] | None = None, **kwargs: Any +) -> str: + """Prompt the user and return the response + + :param prompt_type: which rich prompt to use, defaults to str. + :raises ValueError: unsupported prompt type + :return: the user's response as a string """ + if not prompt_type or prompt_type is str: + return Prompt.ask(*args, **kwargs) + elif prompt_type is int: + return str(IntPrompt.ask(*args, **kwargs)) + else: + raise ValueError(f"unsupported {prompt_type}") + + +class Verbosity(enum.IntEnum): + NORMAL = 0 + DETAIL = enum.auto() + DEBUG = enum.auto() + - def start(self, text: str) -> None: - click.echo(text) +LOG_LEVELS = { + Verbosity.NORMAL: logging.WARN, + Verbosity.DETAIL: logging.INFO, + Verbosity.DEBUG: logging.DEBUG, +} - def stop_and_persist(self, symbol: str = " ", text: Optional[str] = None) -> None: - click.echo(symbol + " " + (text or "")) - succeed = fail = start +class Emoji: + if is_legacy_windows(): + SUCC = "v" + FAIL = "x" + LOCK = " " + CONGRAT = " " + POPPER = " " + ELLIPSIS = "..." + ARROW_SEPARATOR = ">" + else: + SUCC = ":heavy_check_mark:" + FAIL = ":heavy_multiplication_x:" + LOCK = ":lock:" + CONGRAT = ":confetti_ball:" + POPPER = ":party_popper:" + ELLIPSIS = "…" + ARROW_SEPARATOR = "➤" + + +if is_legacy_windows(): + SPINNER = "line" +else: + SPINNER = "dots" + + +class DummySpinner: + """A dummy spinner class implementing needed interfaces. + It only displays text on the screen. + """ - text = property(lambda self: "", start) + def update(self, text: str) -> None: + self.text = text - def __enter__(self) -> DummySpinner: + def __enter__(self: SpinnerT) -> SpinnerT: return self def __exit__(self, *args: Any) -> None: @@ -95,32 +136,34 @@ def __exit__(self, *args: Any) -> None: class UI: """Terminal UI object""" - def __init__(self, verbosity: int = NORMAL, no_ansi: Optional[bool] = None) -> None: + def __init__(self, verbosity: Verbosity = Verbosity.NORMAL) -> None: self.verbosity = verbosity - self._indent = "" - self.supports_ansi = not no_ansi if no_ansi is not None else supports_ansi() - if not self.supports_ansi: - colorama.init() - else: - colorama.deinit() def set_verbosity(self, verbosity: int) -> None: - self.verbosity = verbosity + self.verbosity = Verbosity(verbosity) def echo( self, message: str = "", err: bool = False, - verbosity: int = NORMAL, + verbosity: Verbosity = Verbosity.NORMAL, **kwargs: Any, ) -> None: + """Print a message using the rich console + + :param message: message with rich markup, defaults to "". + :param err: if True, print to stderr, defaults to False. + :param verbosity: verbosity level, defaults to NORMAL. 
+ """ if self.verbosity >= verbosity: - click.secho( - self._indent + str(message), err=err, color=self.supports_ansi, **kwargs - ) + console = _err_console if err else _console + if not console.is_interactive: + kwargs.setdefault("crop", False) + kwargs.setdefault("overflow", "ignore") + console.print(message, **kwargs) def display_columns( - self, rows: Sequence[Sequence[str]], header: Optional[List[str]] = None + self, rows: Sequence[Sequence[str]], header: list[str] | None = None ) -> None: """Print rows in aligned columns. @@ -128,51 +171,24 @@ def display_columns( :param header: a list of header strings. """ - def get_aligner(align: str) -> Callable: - if align == ">": - return rjust - if align == "^": - return centerize - else: - return ljust - - sizes = list( - map( - lambda column: max(map(lambda x: len(strip_ansi(x)), column)), - zip_longest(header or [], *rows, fillvalue=""), - ) - ) - - aligners = [ljust] * len(sizes) if header: - aligners = [] - for i, head in enumerate(header): - aligners.append(get_aligner(head[0])) - if head[0] in (">", "^", "<"): - header[i] = head[1:] - self.echo( - " ".join( - aligner(head, size) - for aligner, head, size in zip(aligners, header, sizes) - ).rstrip() - ) - # Print a separator - self.echo(" ".join("-" * size for size in sizes)) + table = Table(box=ROUNDED) + for title in header: + if title[0] == "^": + title, justify = title[1:], "center" + elif title[0] == ">": + title, justify = title[1:], "right" + else: + title, justify = title, "left" + table.add_column(title, justify=justify) + else: + table = Table.grid(padding=(0, 1)) + for _ in rows[0]: + table.add_column() for row in rows: - self.echo( - " ".join( - aligner(item, size) - for aligner, item, size in zip(aligners, row, sizes) - ).rstrip() - ) + table.add_row(*row) - @contextlib.contextmanager - def indent(self, prefix: str) -> Iterator[None]: - """Indent the following lines with a prefix.""" - _indent = self._indent - self._indent += prefix - yield - self._indent = _indent + _console.print(table) @contextlib.contextmanager def logging(self, type_: str = "install") -> Iterator[logging.Logger]: @@ -181,14 +197,16 @@ def logging(self, type_: str = "install") -> Iterator[logging.Logger]: """ file_name = mktemp(".log", f"pdm-{type_}-") - if self.verbosity >= DETAIL: - handler: logging.Handler = logging.StreamHandler() + if self.verbosity >= Verbosity.DETAIL: + handler: logging.Handler = RichHandler( + console=_err_console, show_time=False, show_level=False, show_path=False + ) + handler.setLevel(LOG_LEVELS[self.verbosity]) else: handler = logging.FileHandler(file_name, encoding="utf-8") - handler.setLevel(logging.DEBUG) - logger.handlers[1:] = [handler] - pip_logger = logging.getLogger("pip.subprocessor") - pip_logger.handlers[:] = [handler] + handler.setLevel(logging.DEBUG) + handler.setFormatter(logging.Formatter("%(name)s: %(message)s")) + logger.handlers[1:] = unearth_logger.handlers[:] = [handler] def cleanup() -> None: try: @@ -199,34 +217,31 @@ def cleanup() -> None: try: yield logger except Exception: - if self.verbosity < DETAIL: + if self.verbosity < Verbosity.DETAIL: logger.exception("Error occurs") - self.echo(yellow(f"See {file_name} for detailed debug log."), err=True) + self.echo( + f"See [bold yellow]{file_name}[/] for detailed debug log.", + style="red", + err=True, + ) raise else: atexit.register(cleanup) finally: logger.handlers.remove(handler) - pip_logger.handlers.remove(handler) - def open_spinner( - self, title: str, spinner: str = "dots" - ) -> Union[DummySpinner, 
halo.Halo]: + def open_spinner(self, title: str) -> Spinner: """Open a spinner as a context manager.""" - if self.verbosity >= DETAIL or not self.supports_ansi: + if self.verbosity >= Verbosity.DETAIL or not is_interactive(): return DummySpinner() else: - return halo.Halo( # type: ignore - title, spinner=spinner, indent=self._indent - ) - - -class Emoji: - """A collection of emoji characters used in terminal output""" - - if supports_unicode(): # type: ignore - SUCC = "🎉" - LOCK = "🔒" - else: - SUCC = "" - LOCK = "" + return _console.status(title, spinner=SPINNER, spinner_style="bold cyan") + + def make_progress(self, *columns: str | ProgressColumn, **kwargs: Any) -> Progress: + """create a progress instance for indented spinners""" + return Progress( + *columns, + console=_console, + disable=self.verbosity >= Verbosity.DETAIL, + **kwargs, + ) diff --git a/pdm/utils.py b/pdm/utils.py index 48f89eda04..31f00ea681 100644 --- a/pdm/utils.py +++ b/pdm/utils.py @@ -4,6 +4,7 @@ from __future__ import annotations import atexit +import contextlib import functools import json import os @@ -15,37 +16,14 @@ import tempfile import urllib.parse as parse import warnings -from contextlib import contextmanager from pathlib import Path from re import Match -from typing import ( - Any, - BinaryIO, - Callable, - Generic, - Iterable, - Iterator, - TextIO, - TypeVar, - cast, - no_type_check, - overload, -) +from typing import IO, Any, Callable, Generic, Iterator, TypeVar, overload from packaging.version import Version -from pip._vendor.packaging.tags import Tag -from pip._vendor.requests import Session - -from pdm._types import Distribution, Source -from pdm.models.pip_shims import ( - InstallCommand, - InstallRequirement, - Link, - PackageFinder, - PipWheel, - get_package_finder, - url_to_path, -) + +from pdm._types import Source +from pdm.compat import Distribution if sys.version_info >= (3, 8): from functools import cached_property @@ -99,42 +77,6 @@ def prepare_pip_source_args( return pip_args -def get_pypi_source() -> tuple[str, bool]: - """Get what is defined in pip.conf as the index-url.""" - install_cmd = InstallCommand() - options, _ = install_cmd.parser.parse_args([]) - index_url = options.index_url - parsed = parse.urlparse(index_url) - verify_ssl = parsed.scheme == "https" - if any(parsed.hostname.startswith(host) for host in options.trusted_hosts): - verify_ssl = False - return index_url, verify_ssl - - -def get_finder( - sources: list[Source], - cache_dir: str | None = None, - python_version: tuple[int, ...] 
| None = None, - python_abi_tag: str | None = None, - ignore_requires_python: bool = False, -) -> PackageFinder: - install_cmd = InstallCommand() - pip_args = prepare_pip_source_args(sources) - options, _ = install_cmd.parser.parse_args(pip_args) - if cache_dir: - options.cache_dir = cache_dir - finder = get_package_finder( - install_cmd=install_cmd, - options=options, - python_version=python_version, - python_abi_tag=python_abi_tag, - ignore_requires_python=ignore_requires_python, - ) - if not hasattr(finder, "session"): - finder.session = finder._link_collector.session # type: ignore - return finder - - def create_tracked_tempdir( suffix: str | None = None, prefix: str | None = None, dir: str | None = None ) -> str: @@ -148,9 +90,22 @@ def clean_up() -> None: return name -def parse_name_version_from_wheel(filename: str) -> tuple[str, str]: - w = PipWheel(os.path.basename(filename)) - return w.name, w.version +def get_index_urls(sources: list[Source]) -> tuple[list[str], list[str], list[str]]: + """Parse the project sources and return + (index_urls, find_link_urls, trusted_hosts) + """ + index_urls, find_link_urls, trusted_hosts = [], [], [] + for source in sources: + url = source["url"] + netloc = parse.urlparse(url).netloc + host = netloc.rsplit("@", 1)[-1] + if host not in trusted_hosts and not source.get("verify_ssl", True): + trusted_hosts.append(host) + if source.get("type", "index") == "index": + index_urls.append(url) + else: + find_link_urls.append(url) + return index_urls, find_link_urls, trusted_hosts def url_without_fragments(url: str) -> str: @@ -164,56 +119,6 @@ def join_list_with(items: list[Any], sep: Any) -> list[Any]: return new_items[:-1] -original_wheel_supported = PipWheel.supported -original_support_index_min = PipWheel.support_index_min -_has_find_most_preferred_tag = ( - getattr(PipWheel, "find_most_preferred_tag", None) is not None -) - -if _has_find_most_preferred_tag: - original_find: Any = PipWheel.find_most_preferred_tag -else: - original_find = None - - -@no_type_check -@contextmanager -def allow_all_wheels(enable: bool = True) -> Iterator: - """Monkey patch pip.Wheel to allow all wheels - - The usual checks against platforms and Python versions are ignored to allow - fetching all available entries in PyPI. This also saves the candidate cache - and set a new one, or else the results from the previous non-patched calls - will interfere. - """ - if not enable: - yield - return - - def _wheel_supported(self: PipWheel, tags: Iterable[Tag]) -> bool: - # Ignore current platform. Support everything. - return True - - def _wheel_support_index_min(self: PipWheel, tags: list[Tag]) -> int: - # All wheels are equal priority for sorting. - return 0 - - def _find_most_preferred_tag( - self: PipWheel, tags: list[Tag], tag_to_priority: dict[Tag, int] - ) -> int: - return 0 - - PipWheel.supported = _wheel_supported - PipWheel.support_index_min = _wheel_support_index_min - if _has_find_most_preferred_tag: - PipWheel.find_most_preferred_tag = _find_most_preferred_tag - yield - PipWheel.supported = original_wheel_supported - PipWheel.support_index_min = original_support_index_min - if _has_find_most_preferred_tag: - PipWheel.find_most_preferred_tag = original_find - - def find_project_root(cwd: str = ".", max_depth: int = 5) -> str | None: """Recursively find a `pyproject.toml` at given path or current working directory. 
If none is found, go to the parent directory, at most `max_depth` levels will be
@@ -297,12 +202,15 @@ def get_in_project_venv_python(root: Path) -> Path | None:
     return None


-@contextmanager
+@contextlib.contextmanager
 def atomic_open_for_write(
-    filename: str | Path, *, encoding: str = "utf-8"
-) -> Iterator[TextIO]:
-    fd, name = tempfile.mkstemp("-atomic-write", "pdm-")
-    fp = open(fd, "w", encoding=encoding)
+    filename: str | Path, *, mode: str = "w", encoding: str = "utf-8"
+) -> Iterator[IO]:
+    dirname = os.path.dirname(filename)
+    if not os.path.exists(dirname):
+        os.makedirs(dirname)
+    fd, name = tempfile.mkstemp(prefix="atomic-write-", dir=dirname)
+    fp = open(fd, mode, encoding=encoding if "b" not in mode else None)
     try:
         yield fp
     except Exception:
@@ -310,10 +218,8 @@ def atomic_open_for_write(
         raise
     else:
         fp.close()
-        try:
+        with contextlib.suppress(OSError):
             os.unlink(filename)
-        except OSError:
-            pass
        # The tempfile is created with mode 600, so we need to restore the default mode
        # with copyfile() instead of move().
        # See: https://github.com/pdm-project/pdm/issues/542
@@ -322,7 +228,7 @@
        os.unlink(name)


-@contextmanager
+@contextlib.contextmanager
 def cd(path: str | Path) -> Iterator:
     _old_cwd = os.getcwd()
     os.chdir(path)
@@ -332,47 +238,59 @@
        os.chdir(_old_cwd)


-@contextmanager
-def open_file(url: str, session: Session | None = None) -> Iterator[BinaryIO]:
-    if url.startswith("file://"):
-        local_path = url_to_path(url)
-        if os.path.isdir(local_path):
-            raise ValueError("Cannot open directory for read: {}".format(url))
-        else:
-            with open(local_path, "rb") as local_file:
-                yield local_file
+def url_to_path(url: str) -> str:
+    """
+    Convert a file: URL to a path.
+    """
+    from urllib.request import url2pathname
+
+    WINDOWS = sys.platform == "win32"
+
+    assert url.startswith(
+        "file:"
+    ), f"You can only turn file: urls into filenames (not {url!r})"
+
+    _, netloc, path, _, _ = parse.urlsplit(url)
+
+    if not netloc or netloc == "localhost":
+        # According to RFC 8089, same as empty authority.
+        netloc = ""
+    elif WINDOWS:
+        # If we have a UNC path, prepend UNC share notation.
+        netloc = "\\\\" + netloc
     else:
-        assert session
-        headers = {"Accept-Encoding": "identity"}
-        with session.get(url, headers=headers, stream=True) as resp:
-            try:
-                raw = getattr(resp, "raw", None)
-                result = raw or resp
-                yield result
-            finally:
-                if raw:
-                    conn = getattr(raw, "_connection", None)
-                    if conn is not None:
-                        conn.close()
-                result.close()
-
-
-def populate_link(
-    finder: PackageFinder,
-    ireq: InstallRequirement,
-    upgrade: bool = False,
-) -> Link | None:
-    """Populate ireq's link attribute"""
-    if not ireq.link:
-        candidate = finder.find_requirement(ireq, upgrade)
-        if not candidate:
-            return None
-        link = cast(Link, getattr(candidate, "link", candidate))
-        ireq.link = link
-    return ireq.link
-
-
-_VT = TypeVar("_VT")
+        raise ValueError(
+            f"non-local file URIs are not supported on this platform: {url!r}"
+        )
+
+    path = url2pathname(netloc + path)
+
+    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
+    # This creates issues for path-related functions like io.open(), so we try
+    # to detect and strip the leading slash.
+    if (
+        WINDOWS
+        and not netloc  # Not UNC.
+        and len(path) >= 3
+        and path[0] == "/"  # Leading slash to strip.
+        and path[1].isalpha()  # Drive letter.
+        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
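+        # e.g. "/C:" (colon + end of string) or "/C:/Users/foo" (colon +
+        # absolute path); the spurious leading slash is dropped just below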
+ ): + path = path[1:] + + return path + + +def path_to_url(path: str) -> str: + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + from urllib.request import pathname2url + + path = os.path.normpath(os.path.abspath(path)) + url = parse.urljoin("file:", pathname2url(path)) + return url def expand_env_vars(credential: str, quote: bool = False) -> str: @@ -485,8 +403,9 @@ def get_rev_from_url(url: str) -> str: return "" -def normalize_name(name: str) -> str: - return re.sub(r"[^A-Za-z0-9.]+", "-", name).lower() +def normalize_name(name: str, lowercase: bool = True) -> str: + name = re.sub(r"[^A-Za-z0-9.]+", "-", name) + return name.lower() if lowercase else name def is_egg_link(dist: Distribution) -> bool: diff --git a/pyproject.toml b/pyproject.toml index 1a2c84cb1f..ee1fbe97e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,22 +9,24 @@ requires-python = ">=3.7" license = {text = "MIT"} dependencies = [ "blinker", - "click>=7", "findpython", "importlib-metadata; python_version < \"3.8\"", "installer>=0.5.1,<0.6", "packaging", - "pdm-pep517>=0.9,<1", + "pdm-pep517>=1.0.0,<2.0.0", "pep517>=0.11.0", - "pip>=20.1", + "pip>=20", "platformdirs", "python-dotenv>=0.15", "resolvelib>=0.8,<0.9", + "rich>=12.3.0", "shellingham>=1.3.2", - "tomli>=1.1.0", + "tomli>=1.1.0; python_version < \"3.11\"", "tomlkit>=0.8.0,<1", "typing-extensions; python_version < \"3.8\"", - "wheel>=0.36.2", + "unearth>=0.4.0,<0.5.0", + "cachecontrol[filecache]>=0.12.11", + "requests-toolbelt", ] name = "pdm" description = "Python Development Master" @@ -51,17 +53,20 @@ Changelog = "https://pdm.fming.dev/latest/dev/changelog/" pdm = "pdm.core:main" [tool.pdm] +version = {use_scm = true} + +[tool.pdm.build] includes = ["pdm"] excludes = ["./**/.git"] source-includes = ["tests", "CHANGELOG.md", "LICENSE", "README.md"] # editables backend doesn't work well with namespace packages editable-backend = "path" -version = {use_scm = true} [tool.pdm.scripts] pre_release = "python tasks/max_versions.py" release = "python tasks/release.py" -test = "pytest tests/" +test = "pytest" +tox = "tox" doc = {shell = "cd docs && mkdocs serve", help = "Start the dev server for doc preview"} lint = "pre-commit run --all-files" complete = {call = "tasks.complete:main", help = "Create autocomplete files for bash and fish"} @@ -69,10 +74,15 @@ benchmark = "python tasks/benchmarks/main.py" [tool.pdm.dev-dependencies] test = [ + "click>=8.1.3", "pytest", "pytest-cov", "pytest-mock", - "pytest-xdist>=1.31.0" + "pytest-xdist>=1.31.0", +] +tox = [ + "tox", + "tox-pdm>=0.5", ] doc = [ "mkdocs>=1.1", @@ -81,11 +91,11 @@ doc = [ "markdown-include>=0.5.1", "mike>=1.1.2", "setuptools>=62.3.3", + "markdown-exec>=0.7.0", ] workflow = [ "parver>=0.3.1", "towncrier>=20", - "vendoring; python_version >= \"3.8\"", "pycomplete~=0.3" ] @@ -103,7 +113,6 @@ exclude = ''' | buck-out | build | dist - | pdm/_vendor | tests/fixtures )/ ''' @@ -159,52 +168,19 @@ build-backend = "pdm.pep517.api" [tool.isort] profile = "black" atomic = true -skip_glob = ["*/setup.py", "pdm/_vendor/*"] +skip_glob = ["*/setup.py"] filter_files = true known_first_party = ["pdm"] known_third_party = [ - "appdirs", - "click", - "cfonts", - "distlib", - "halo", + "platformdirs", "packaging", - "pip_shims", "pytest", - "pythonfinder", - "tomlkit" -] - -[tool.vendoring] -destination = "pdm/_vendor/" -requirements = "pdm/_vendor/vendors.txt" -namespace = "pdm._vendor" - -protected-files = ["__init__.py", "README.md", "vendors.txt"] -patches-dir 
= "tasks/patches" - -[tool.vendoring.transformations] -substitute = [ - {match = 'import halo\.', replace = 'import pdm._vendor.halo.'} -] -drop = [ - "bin/", - "*.so", - "typing.*", - "*/tests/" + "findpython", + "tomlkit", + "unearth", + "requests", ] -[tool.vendoring.typing-stubs] -halo = [] -log_symbols = [] -spinners = [] -termcolor = [] -colorama = [] - -[tool.vendoring.license.directories] - -[tool.vendoring.license.fallback-urls] - [tool.pytest.ini_options] filterwarnings = [ "ignore::DeprecationWarning" @@ -216,3 +192,7 @@ markers = [ "deprecated: Tests about deprecated features", ] addopts = "-ra" +testpaths = [ + "tests/", +] + diff --git a/setup.cfg b/setup.cfg index ec10b73790..ef9ceaba95 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,6 @@ exclude = .git, tests/fixtures/*, - pdm/_vendor/*, env, dist, build, @@ -21,11 +20,9 @@ branch = true source = pdm/ omit = pdm/__main__.py - pdm/installers/_editable_install.py pdm/pep582/sitecustomize.py - pdm/setup_dev.py pdm/models/in_process/*.py - pdm/_vendor/* + pdm/models/setup.py [coverage:report] # Regexes for lines to exclude from consideration @@ -48,4 +45,4 @@ ignore_missing_imports = True disallow_incomplete_defs = True disallow_untyped_defs = True disallow_untyped_decorators = True -exclude = pdm/(_vendor/|pep582/|models/in_process/.+\.py|resolver/providers\.py) +exclude = pdm/(pep582/|models/in_process/.+\.py) diff --git a/tasks/benchmarks/utils.py b/tasks/benchmarks/utils.py index 09be156cb8..5cbd335743 100644 --- a/tasks/benchmarks/utils.py +++ b/tasks/benchmarks/utils.py @@ -8,7 +8,18 @@ from time import monotonic from typing import Any, Callable, Generator, Sequence, TypeVar -import click +from rich.console import Console + +_console = Console(highlight=False) +_err_console = Console(stderr=True, highlight=False) + + +def echo(*args: str, err: bool = False, **kwargs: Any): + if err: + _err_console.print(*args, **kwargs) + else: + _console.print(*args, **kwargs) + PROJECT_DIR = Path(__file__).parent.joinpath("projects") @@ -33,9 +44,9 @@ def run(self, args: Sequence[str], **kwargs: Any) -> subprocess.CompletedProcess **kwargs, ) except subprocess.CalledProcessError as e: - click.secho(f"Run command {e.cmd} failed", fg="yellow", err=True) - click.secho(e.stdout.decode(), fg="yellow", err=True) - click.secho(e.stderr.decode(), fg="red", err=True) + echo(f"Run command {e.cmd} failed", style="yellow", err=True) + echo(e.stdout.decode(), style="yellow", err=True) + echo(e.stderr.decode(), style="red", err=True) sys.exit(1) def measure( @@ -44,7 +55,7 @@ def measure( time_start = monotonic() proc = self.run(args, **kwargs) time_cost = monotonic() - time_start - click.echo(f" {click.style(text + ':', fg='yellow'):>42s} {time_cost:.2f}s") + echo(f"[yellow]{(text + ':'):>42s}[/] {time_cost:.2f}s") return proc @@ -73,7 +84,7 @@ def temp_env() -> Generator[None, None, None]: def benchmark(func: TestFunc) -> Any: meta = func._meta version = subprocess.check_output([meta["cmd"], "--version"]).strip().decode("utf8") - click.secho(f"Running benchmark: {version}", fg="green") + echo(f"Running benchmark: {version}", style="green") project_file = PROJECT_DIR.joinpath(meta["project_file"]) with tempfile.TemporaryDirectory(prefix="pdm-benchmark-") as tempdir: if project_file.name.startswith("pyproject"): diff --git a/tasks/patches/halo-multiple.patch b/tasks/patches/halo-multiple.patch deleted file mode 100644 index cbfda78cd6..0000000000 --- a/tasks/patches/halo-multiple.patch +++ /dev/null @@ -1,240 +0,0 @@ -diff --git 
a/pdm/_vendor/halo/_utils.py b/pdm/_vendor/halo/_utils.py -index 14a0d62..c0b5522 100644 ---- a/pdm/_vendor/halo/_utils.py -+++ b/pdm/_vendor/halo/_utils.py -@@ -3,17 +3,13 @@ - """ - import codecs - import platform --import six - try: - from shutil import get_terminal_size - except ImportError: - from backports.shutil_get_terminal_size import get_terminal_size - --from colorama import init - from termcolor import colored - --init(autoreset=True) -- - - def is_supported(): - """Check whether operating system supports main symbols or not. -@@ -90,10 +86,7 @@ def is_text_type(text): - bool - Whether parameter is a string or not - """ -- if isinstance(text, six.text_type) or isinstance(text, six.string_types): -- return True -- -- return False -+ return isinstance(text, str) - - - def decode_utf_8_text(text): -diff --git a/pdm/_vendor/halo/halo.py b/pdm/_vendor/halo/halo.py -index 9bd32bb..4842d3c 100644 ---- a/pdm/_vendor/halo/halo.py -+++ b/pdm/_vendor/halo/halo.py -@@ -12,7 +12,7 @@ import time - - import halo.cursor as cursor - --from log_symbols.symbols import LogSymbols -+from log_symbols.symbols import LogSymbols, is_supported - from spinners.spinners import Spinners - - from halo._utils import ( -@@ -20,7 +20,6 @@ from halo._utils import ( - decode_utf_8_text, - get_environment, - get_terminal_columns, -- is_supported, - is_text_type, - encode_utf_8_text, - ) -@@ -35,11 +34,16 @@ class Halo(object): - """ - - CLEAR_LINE = "\033[K" -+ CLEAR_REST = "\033[J" - SPINNER_PLACEMENTS = ( - "left", - "right", - ) - -+ # a global list to keep all Halo instances -+ _instances = [] -+ _lock = threading.Lock() -+ - def __init__( - self, - text="", -@@ -50,6 +54,7 @@ class Halo(object): - placement="left", - interval=-1, - enabled=True, -+ indent="", - stream=sys.stdout, - ): - """Constructs the Halo object. -@@ -96,6 +101,9 @@ class Halo(object): - self._stop_spinner = None - self._spinner_id = None - self.enabled = enabled -+ self._stopped = False -+ self._content = "" -+ self.indent = indent - - environment = get_environment() - -@@ -294,7 +302,34 @@ class Halo(object): - - return True - -- def _write(self, s): -+ def _pop_stream_content_until_self(self, clear_self=False): -+ """Move cursor to the end of this instance's content and erase all contents -+ following it. -+ Parameters -+ ---------- -+ clear_self: bool -+ If equals True, the content of current line will also get cleared -+ Returns -+ ------- -+ str -+ The content of stream following this instance. -+ """ -+ erased_content = [] -+ lines_to_erase = self._content.count("\n") if clear_self else 0 -+ for inst in Halo._instances[::-1]: -+ if inst is self: -+ break -+ erased_content.append(inst._content) -+ lines_to_erase += inst._content.count("\n") -+ -+ if lines_to_erase > 0: -+ # Move cursor up n lines -+ self._write_stream("\033[{}A".format(lines_to_erase)) -+ # Erase rest content -+ self._write_stream(self.CLEAR_REST) -+ return "".join(reversed(erased_content)) -+ -+ def _write_stream(self, s): - """Write to the stream, if writable - Parameters - ---------- -@@ -304,15 +339,33 @@ class Halo(object): - if self._check_stream(): - self._stream.write(s) - -- def _hide_cursor(self): -- """Disable the user's blinking cursor -+ def _write(self, s, overwrite=False): -+ """Write to the stream and keep following lines unchanged. -+ Parameters -+ ---------- -+ s : str -+ Characters to write to the stream -+ overwrite: bool -+ If set to True, overwrite the content of current instance. 
- """ -+ if s.startswith("\r"): -+ s = f"\r{self.indent}{s[1:]}" -+ else: -+ s = f"{self.indent}{s}" -+ with Halo._lock: -+ erased_content = self._pop_stream_content_until_self(overwrite) -+ self._write_stream(s) -+ # Write back following lines -+ self._write_stream(erased_content) -+ self._content = s if overwrite else self._content + s -+ -+ def _hide_cursor(self): -+ """Disable the user's blinking cursor""" - if self._check_stream() and self._stream.isatty(): - cursor.hide(stream=self._stream) - - def _show_cursor(self): -- """Re-enable the user's blinking cursor -- """ -+ """Re-enable the user's blinking cursor""" - if self._check_stream() and self._stream.isatty(): - cursor.show(stream=self._stream) - -@@ -390,26 +443,26 @@ class Halo(object): - ------- - self - """ -- self._write("\r") -- self._write(self.CLEAR_LINE) -+ with Halo._lock: -+ erased_content = self._pop_stream_content_until_self(True) -+ self._content = "" -+ self._write_stream(erased_content) - return self - - def _render_frame(self): -- """Renders the frame on the line after clearing it. -- """ -+ """Renders the frame on the line after clearing it.""" - if not self.enabled: - # in case we're disabled or stream is closed while still rendering, - # we render the frame and increment the frame index, so the proper - # frame is rendered if we're reenabled or the stream opens again. - return - -- self.clear() - frame = self.frame() -- output = "\r{}".format(frame) -+ output = "\r{}\n".format(frame) - try: -- self._write(output) -+ self._write(output, True) - except UnicodeEncodeError: -- self._write(encode_utf_8_text(output)) -+ self._write(encode_utf_8_text(output), True) - - def render(self): - """Runs the render until thread flag is set. -@@ -490,6 +543,14 @@ class Halo(object): - if not (self.enabled and self._check_stream()): - return self - -+ # Clear all stale Halo instances created before -+ # Check against Halo._instances instead of self._instances -+ # to avoid possible overriding in subclasses. 
-+ if all(inst._stopped for inst in Halo._instances): -+ Halo._instances[:] = [] -+ # Allow for calling start() multiple times -+ if self not in Halo._instances: -+ Halo._instances.append(self) - self._hide_cursor() - - self._stop_spinner = threading.Event() -@@ -498,6 +559,7 @@ class Halo(object): - self._render_frame() - self._spinner_id = self._spinner_thread.name - self._spinner_thread.start() -+ self._stopped = False - - return self - -@@ -511,12 +573,17 @@ class Halo(object): - self._stop_spinner.set() - self._spinner_thread.join() - -+ if self._stopped: -+ return -+ - if self.enabled: - self.clear() - - self._frame_index = 0 - self._spinner_id = None - self._show_cursor() -+ self._stopped = True -+ - return self - - def succeed(self, text=None): diff --git a/tasks/patches/log-symbols-support-utf8.patch b/tasks/patches/log-symbols-support-utf8.patch deleted file mode 100644 index 1225e21ae4..0000000000 --- a/tasks/patches/log-symbols-support-utf8.patch +++ /dev/null @@ -1,40 +0,0 @@ -diff --git a/pdm/_vendor/log_symbols/symbols.py b/pdm/_vendor/log_symbols/symbols.py -index b7047fc..3ed2ef0 100644 ---- a/pdm/_vendor/log_symbols/symbols.py -+++ b/pdm/_vendor/log_symbols/symbols.py -@@ -1,6 +1,9 @@ - # -*- coding: utf-8 -*- - """Provide log symbols for various log levels.""" --import platform -+import codecs -+import locale -+import os -+import sys - - from enum import Enum - from colorama import init, deinit, Fore -@@ -30,13 +33,17 @@ def is_supported(): - boolean - Whether operating system supports main symbols or not - """ -- -- os_arch = platform.system() -- -- if os_arch != 'Windows': -- return True -- -- return False -+ if os.getenv("DISABLE_UNICODE_OUTPUT"): -+ return False -+ encoding = getattr(sys.stdout, "encoding") -+ if encoding is None: -+ encoding = locale.getpreferredencoding(False) -+ -+ try: -+ encoding = codecs.lookup(encoding).name -+ except Exception: -+ encoding = "utf-8" -+ return encoding == "utf-8" - - - _SYMBOLS = _MAIN if is_supported() else _FALLBACKS diff --git a/tasks/release.py b/tasks/release.py index 0d61bf2e86..beb8266ee5 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -2,27 +2,39 @@ import subprocess import sys from pathlib import Path +from typing import Any -import click import parver +from rich.console import Console + +_console = Console(highlight=False) +_err_console = Console(stderr=True, highlight=False) + + +def echo(*args: str, err: bool = False, **kwargs: Any): + if err: + _err_console.print(*args, **kwargs) + else: + _console.print(*args, **kwargs) + PROJECT_DIR = Path(__file__).parent.parent def get_current_version(): - from pdm.pep517.metadata import Metadata + from pdm.pep517.base import Builder - metadata = Metadata(PROJECT_DIR / "pyproject.toml") + metadata = Builder(PROJECT_DIR).meta return metadata.version def bump_version(pre=None, major=False, minor=False, patch=True): - if not any([major, minor, patch]): + if not any([major, minor, patch, pre]): patch = True - if len([v for v in [major, minor, patch] if v]) != 1: - click.secho( + if len([v for v in [major, minor, patch] if v]) > 1: + echo( "Only one option should be provided among " "(--major, --minor, --patch)", - fg="red", + style="red", err=True, ) sys.exit(1) @@ -42,7 +54,7 @@ def bump_version(pre=None, major=False, minor=False, patch=True): def release(dry_run=False, commit=True, pre=None, major=False, minor=False, patch=True): new_version = bump_version(pre, major, minor, patch) - click.secho(f"Bump version to: {new_version}", fg="yellow") + echo(f"Bump version to: 
{new_version}", style="yellow") if dry_run: subprocess.check_call( ["towncrier", "build", "--version", new_version, "--draft"] diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py new file mode 100644 index 0000000000..4065a7e549 --- /dev/null +++ b/tests/cli/conftest.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import shutil +import textwrap +from dataclasses import dataclass +from pathlib import Path +from typing import Any +from unittest.mock import MagicMock + +import pytest +import requests +from pytest_mock import MockerFixture + +from pdm.cli.commands.publish.package import PackageFile +from pdm.cli.commands.publish.repository import Repository +from tests import FIXTURES + + +@pytest.fixture +def mock_run_gpg(mocker: MockerFixture): + def mock_run_gpg(args): + signature_file = args[-1] + ".asc" + with open(signature_file, "wb") as f: + f.write(b"fake signature") + + mocker.patch.object(PackageFile, "_run_gpg", side_effect=mock_run_gpg) + + +@pytest.fixture +def prepare_packages(tmp_path: Path): + dist_path = tmp_path / "dist" + dist_path.mkdir() + for filename in [ + "demo-0.0.1-py2.py3-none-any.whl", + "demo-0.0.1.tar.gz", + "demo-0.0.1.zip", + ]: + shutil.copy2(FIXTURES / "artifacts" / filename, dist_path) + + +@pytest.fixture +def mock_pypi(mocker: MockerFixture): + def post(url, *, data, **kwargs): + # consume the data body to make the progress complete + data.read() + resp = requests.Response() + resp.status_code = 200 + resp.reason = "OK" + resp.url = url + return resp + + return mocker.patch("pdm.models.session.PDMSession.post", side_effect=post) + + +@pytest.fixture +def uploaded(mocker: MockerFixture): + packages = [] + + def fake_upload(package, progress): + packages.append(package) + resp = requests.Response() + resp.status_code = 200 + resp.reason = "OK" + resp.url = "https://upload.pypi.org/legacy/" + return resp + + mocker.patch.object(Repository, "upload", side_effect=fake_upload) + return packages + + +@dataclass +class PublishMock: + mock_pypi: MagicMock + uploaded: list[Any] + + +@pytest.fixture +# @pytest.mark.usefixtures("mock_run_gpg", "prepare_packages") +def mock_publish(mock_pypi, uploaded) -> PublishMock: + return PublishMock( + mock_pypi=mock_pypi, + uploaded=uploaded, + ) + + +@pytest.fixture +def _echo(project): + """ + Provides an echo.py script producing cross-platform expectable outputs + """ + (project.root / "echo.py").write_text( + textwrap.dedent( + """\ + import os, sys, io + sys.stdout = io.TextIOWrapper(sys.stdout.buffer, newline='\\n') + name = sys.argv[1] + vars = " ".join([f"{v}={os.getenv(v)}" for v in sys.argv[2:]]) + print(f"{name} CALLED with {vars}" if vars else f"{name} CALLED") + """ + ) + ) diff --git a/tests/cli/test_add.py b/tests/cli/test_add.py index c8064a36fc..56fceef5a9 100644 --- a/tests/cli/test_add.py +++ b/tests/cli/test_add.py @@ -1,10 +1,10 @@ import shutil -from pathlib import Path import pytest +from unearth import Link from pdm.cli import actions -from pdm.models.pip_shims import Link +from pdm.exceptions import PdmUsageError from pdm.models.specifiers import PySpecSet from tests import FIXTURES @@ -55,27 +55,23 @@ def test_add_package_to_custom_dev_group(project, working_set): @pytest.mark.usefixtures("repository", "vcs") -def test_add_editable_package(project, working_set, is_dev): +def test_add_editable_package(project, working_set): # Ensure that correct python version is used. 
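+    # Editable requirements are now restricted to dev-dependencies, so the
+    # second argument (dev) is True in both calls below; adding them to the
+    # project metadata raises PdmUsageError instead (see
+    # test_add_editable_package_to_metadata_forbidden).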
project.environment.python_requires = PySpecSet(">=3.6") - actions.do_add(project, is_dev, packages=["demo"]) + actions.do_add(project, True, packages=["demo"]) actions.do_add( project, - is_dev, + True, editables=["git+https://github.com/test-root/demo.git#egg=demo"], ) - group = ( - project.tool_settings["dev-dependencies"]["dev"] - if is_dev - else project.meta["dependencies"] - ) - assert "demo" in group[0] - assert "-e git+https://github.com/test-root/demo.git#egg=demo" in group[1] + group = project.tool_settings["dev-dependencies"]["dev"] + assert group == ["-e git+https://github.com/test-root/demo.git#egg=demo"] locked_candidates = project.locked_repository.all_candidates assert ( locked_candidates["demo"].prepare(project.environment).revision == "1234567890abcdef" ) + assert working_set["demo"].link_file assert locked_candidates["idna"].version == "2.7" assert "idna" in working_set @@ -83,48 +79,37 @@ def test_add_editable_package(project, working_set, is_dev): assert not working_set["demo"].link_file -@pytest.mark.usefixtures("repository", "vcs") -def test_editable_package_override_non_editable(project, working_set): +@pytest.mark.usefixtures("repository", "vcs", "working_set") +def test_add_editable_package_to_metadata_forbidden(project): project.environment.python_requires = PySpecSet(">=3.6") - actions.do_add( - project, packages=["git+https://github.com/test-root/demo.git#egg=demo"] - ) - actions.do_add( - project, - editables=["git+https://github.com/test-root/demo.git#egg=demo"], - ) - assert working_set["demo"].link_file + with pytest.raises(PdmUsageError): + actions.do_add( + project, editables=["git+https://github.com/test-root/demo.git#egg=demo"] + ) + with pytest.raises(PdmUsageError): + actions.do_add( + project, + group="foo", + editables=["git+https://github.com/test-root/demo.git#egg=demo"], + ) @pytest.mark.usefixtures("repository", "vcs") -def test_non_editable_no_override_editable(project, working_set, is_editable): +def test_non_editable_override_editable(project, working_set): project.environment.python_requires = PySpecSet(">=3.6") actions.do_add( project, + dev=True, editables=[ "git+https://github.com/test-root/demo.git#egg=demo", - "git+https://github.com/test-root/demo-module.git#egg=demo-module", ], ) actions.do_add( project, + dev=True, packages=["git+https://github.com/test-root/demo.git#egg=demo"], - no_editable=not is_editable, ) - assert working_set["demo-module"].link_file - assert bool(working_set["demo"].link_file) is is_editable - dependencies = project.get_pyproject_dependencies("default") - if is_editable: - assert dependencies == [ - "-e git+https://github.com/test-root/demo.git#egg=demo", - "-e git+https://github.com/test-root/demo-module.git#egg=demo-module", - "demo @ git+https://github.com/test-root/demo.git", - ] - else: - assert dependencies == [ - "demo @ git+https://github.com/test-root/demo.git", - "-e git+https://github.com/test-root/demo-module.git#egg=demo-module", - ] + assert not project.dev_dependencies["demo"].editable @pytest.mark.usefixtures("repository", "working_set") @@ -280,11 +265,13 @@ def test_add_cached_vcs_requirement(project, mocker): url = "git+https://github.com/test-root/demo.git@1234567890abcdef#egg=demo" built_path = FIXTURES / "artifacts/demo-0.0.1-py2.py3-none-any.whl" wheel_cache = project.make_wheel_cache() - cache_path = Path(wheel_cache.get_path_for_link(Link(url))) + cache_path = wheel_cache.get_path_for_link( + Link(url), project.environment.target_python + ) if not cache_path.exists(): 
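+        # seed the wheel cache with a pre-built wheel so the test should need
+        # neither a real download nor a build (both are mocked below)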
cache_path.mkdir(parents=True) shutil.copy2(built_path, cache_path) - downloader = mocker.patch("pdm.models.pip_shims.unpack_url") + downloader = mocker.patch("unearth.finder.unpack_link") builder = mocker.patch("pdm.builders.WheelBuilder.build") actions.do_add(project, packages=[url], no_self=True) lockfile_entry = next(p for p in project.lockfile["package"] if p["name"] == "demo") diff --git a/tests/cli/test_cache.py b/tests/cli/test_cache.py index 78f9419a91..2a5e5db0ab 100644 --- a/tests/cli/test_cache.py +++ b/tests/cli/test_cache.py @@ -1,7 +1,7 @@ import pytest +from unearth import Link from pdm.installers.packages import CachedPackage -from pdm.models.pip_shims import Link from tests import FIXTURES @@ -96,7 +96,7 @@ def test_cache_remove_wildcard(project, invoke): ): assert not (project.cache("wheels") / "arbitrary/path" / name).exists() - assert not (project.cache("http") / "arbitrary/path/foo-0.1.0.tar.gz").exists() + assert (project.cache("http") / "arbitrary/path/foo-0.1.0.tar.gz").exists() @pytest.mark.usefixtures("prepare_wheel_cache", "prepare_http_cache") @@ -159,8 +159,7 @@ def test_cache_info(project, invoke): def test_hash_cache(project, url, hash): with project.environment.get_finder() as finder: hash_cache = project.make_hash_cache() - hash_cache.session = finder.session - assert hash_cache.get_hash(Link(url)) == hash + assert hash_cache.get_hash(Link(url), finder.session) == hash def test_clear_package_cache(project, invoke): diff --git a/tests/cli/test_config.py b/tests/cli/test_config.py index 9a79bc5b92..80e4269368 100644 --- a/tests/cli/test_config.py +++ b/tests/cli/test_config.py @@ -2,6 +2,7 @@ import pytest +from pdm.exceptions import PdmUsageError from pdm.utils import cd @@ -112,3 +113,97 @@ def test_specify_config_file(tmp_path, invoke): result = invoke(["config", "project_max_depth"]) assert result.exit_code == 0 assert result.output.strip() == "9" + + +def test_default_repository_setting(project): + repository = project.global_config.get_repository_config("pypi") + assert repository.url == "https://upload.pypi.org/legacy/" + assert repository.username is None + assert repository.password is None + + repository = project.global_config.get_repository_config("testpypi") + assert repository.url == "https://test.pypi.org/legacy/" + + repository = project.global_config.get_repository_config("nonexist") + assert repository is None + + +def test_repository_config_not_available_on_project(project): + with pytest.raises(PdmUsageError): + project.project_config.get_repository_config("pypi") + + +def test_repository_config_key_short(project): + with pytest.raises(PdmUsageError): + project.global_config["repository.test"] = {"url": "https://example.org/simple"} + + with pytest.raises(PdmUsageError): + project.global_config["repository"] = "123" + + with pytest.raises(PdmUsageError): + del project.global_config["repository"] + + +def test_repostory_overwrite_default(project): + project.global_config["repository.pypi.username"] = "foo" + project.global_config["repository.pypi.password"] = "bar" + repository = project.global_config.get_repository_config("pypi") + assert repository.url == "https://upload.pypi.org/legacy/" + assert repository.username == "foo" + assert repository.password == "bar" + + project.global_config["repository.pypi.url"] = "https://example.pypi.org/legacy/" + repository = project.global_config.get_repository_config("pypi") + assert repository.url == "https://example.pypi.org/legacy/" + + +def test_hide_password_in_output(project, invoke): + assert 
project.global_config["repository.pypi.password"] is None + project.global_config["repository.pypi.username"] = "testuser" + project.global_config["repository.pypi.password"] = "secret" + result = invoke(["config", "repository.pypi"], obj=project, strict=True) + assert "password = " in result.output + result = invoke(["config", "repository.pypi.password"], obj=project, strict=True) + assert "" == result.output.strip() + + +def test_config_get_repository(project, invoke): + config = project.global_config["repository.pypi"] + assert config == project.global_config.get_repository_config("pypi") + assert ( + project.global_config["repository.pypi.url"] + == "https://upload.pypi.org/legacy/" + ) + + result = invoke(["config", "repository.pypi"], obj=project, strict=True) + assert result.stdout.strip() == "url = https://upload.pypi.org/legacy/" + + assert ( + project.global_config.get_repository_config( + "https://example.pypi.org/legacy/" + ).url + == "https://example.pypi.org/legacy/" + ) + + result = invoke(["config", "repository.pypi.url"], obj=project, strict=True) + assert result.stdout.strip() == "https://upload.pypi.org/legacy/" + + +def test_config_set_repository(project): + project.global_config["repository.pypi.url"] = "https://example.pypi.org/legacy/" + project.global_config["repository.pypi.username"] = "foo" + assert ( + project.global_config["repository.pypi.url"] + == "https://example.pypi.org/legacy/" + ) + assert project.global_config["repository.pypi.username"] == "foo" + del project.global_config["repository.pypi.username"] + assert project.global_config["repository.pypi.username"] is None + + +def test_config_del_repository(project): + project.global_config["repository.test.url"] = "https://example.org/simple" + assert project.global_config.get_repository_config("test") is not None + + del project.global_config["repository.test"] + assert project.global_config.get_repository_config("test") is None diff --git a/tests/cli/test_hooks.py b/tests/cli/test_hooks.py new file mode 100644 index 0000000000..bbfa995381 --- /dev/null +++ b/tests/cli/test_hooks.py @@ -0,0 +1,309 @@ +import shlex +from collections import namedtuple +from textwrap import dedent + +import pytest + +from pdm.cli import actions +from pdm.cli.hooks import KNOWN_HOOKS +from pdm.cli.options import from_splitted_env +from pdm.models.requirements import parse_requirement + + +def test_pre_script_fail_fast(project, invoke, capfd, mocker): + project.tool_settings["scripts"] = { + "pre_install": "python -c \"print('PRE INSTALL CALLED'); exit(1)\"", + "post_install": "python -c \"print('POST INSTALL CALLED')\"", + } + project.write_pyproject() + synchronize = mocker.patch("pdm.installers.synchronizers.Synchronizer.synchronize") + result = invoke(["install"], obj=project) + assert result.exit_code == 1 + out, _ = capfd.readouterr() + assert "PRE INSTALL CALLED" in out + assert "POST INSTALL CALLED" not in out + synchronize.assert_not_called() + + +def test_pre_and_post_scripts(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "pre_script": "python echo.py pre_script", + "post_script": "python echo.py post_script", + "pre_test": "python echo.py pre_test", + "test": "python echo.py test", + "post_test": "python echo.py post_test", + "pre_run": "python echo.py pre_run", + "post_run": "python echo.py post_run", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + expected = dedent( + """ + pre_run CALLED + pre_script 
CALLED + pre_test CALLED + test CALLED + post_test CALLED + post_script CALLED + post_run CALLED + """ + ).strip() + assert out.strip() == expected + + +def test_composite_runs_all_hooks(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "test": {"composite": ["first", "second"]}, + "pre_test": "python echo.py Pre-Test", + "post_test": "python echo.py Post-Test", + "first": "python echo.py First", + "pre_first": "python echo.py Pre-First", + "second": "python echo.py Second", + "post_second": "python echo.py Post-Second", + "pre_script": "python echo.py Pre-Script", + "post_script": "python echo.py Post-Script", + "pre_run": "python echo.py Pre-Run", + "post_run": "python echo.py Post-Run", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + expected = dedent( + """ + Pre-Run CALLED + Pre-Script CALLED + Pre-Test CALLED + Pre-Script CALLED + Pre-First CALLED + First CALLED + Post-Script CALLED + Pre-Script CALLED + Second CALLED + Post-Second CALLED + Post-Script CALLED + Post-Test CALLED + Post-Script CALLED + Post-Run CALLED + """ + ).strip() + assert out.strip() == expected + + +@pytest.mark.parametrize("option", [":all", ":pre,:post"]) +def test_skip_all_hooks_option(project, invoke, capfd, option: str, _echo): + project.tool_settings["scripts"] = { + "test": {"composite": ["first", "second"]}, + "pre_test": "python echo.py Pre-Test", + "post_test": "python echo.py Post-Test", + "first": "python echo.py First", + "pre_first": "python echo.py Pre-First", + "post_first": "python echo.py Post-First", + "second": "python echo.py Second", + "pre_second": "python echo.py Pre-Second", + "post_second": "python echo.py Post-Second", + "pre_script": "python echo.py Pre-Script", + "post_script": "python echo.py Post-Script", + "pre_run": "python echo.py Pre-Run", + "post_run": "python echo.py Post-Run", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", f"--skip={option}", "first"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-First CALLED" not in out + assert "First CALLED" in out + assert "Post-First CALLED" not in out + assert "Pre-Script CALLED" not in out + assert "Post-Script CALLED" not in out + capfd.readouterr() + invoke(["run", f"--skip={option}", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Test CALLED" not in out + assert "Pre-First CALLED" not in out + assert "First CALLED" in out + assert "Post-First CALLED" not in out + assert "Pre-Second CALLED" not in out + assert "Second CALLED" in out + assert "Post-Second CALLED" not in out + assert "Post-Test CALLED" not in out + assert "Pre-Script CALLED" not in out + assert "Post-Script CALLED" not in out + assert "Pre-Run CALLED" not in out + assert "Post-Run CALLED" not in out + + +@pytest.mark.parametrize( + "args", + [ + "--skip pre_test,post_first,second", + "-k pre_test,post_first,second", + "--skip pre_test --skip post_first --skip second", + "-k pre_test -k post_first -k second", + "--skip pre_test --skip post_first,second", + "-k pre_test -k post_first,second", + ], +) +def test_skip_option(project, invoke, capfd, args, _echo): + project.tool_settings["scripts"] = { + "test": {"composite": ["first", "second"]}, + "pre_test": "python echo.py Pre-Test", + "post_test": "python echo.py Post-Test", + "first": "python echo.py First", + "pre_first": "python echo.py Pre-First", + "post_first": "python echo.py Post-First", + "second": "python echo.py Second", 
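+        # pre_<task>/post_<task> entries run automatically around the matching
+        # task; --skip/-k is expected to suppress exactly the names listed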
+ "pre_second": "python echo.py Pre-Second", + "post_second": "python echo.py Post-Second", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", *shlex.split(args), "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Test CALLED" not in out + assert "Pre-First CALLED" in out + assert "First CALLED" in out + assert "Post-First CALLED" not in out + assert "Pre-Second CALLED" not in out + assert "Second CALLED" not in out + assert "Post-Second CALLED" not in out + assert "Post-Test CALLED" in out + + +@pytest.mark.parametrize( + "env, expected", + [ + ("pre_test", ["pre_test"]), + ("pre_test,post_test", ["pre_test", "post_test"]), + ("pre_test , post_test", ["pre_test", "post_test"]), + (None, None), + (" ", None), + (" , ", None), + ], +) +def test_skip_option_default_from_env(env, expected, monkeypatch): + if env is not None: + monkeypatch.setenv("PDM_SKIP_HOOKS", env) + + # Default value is set once and not easily testable + # so we test the function generating this default value + assert from_splitted_env("PDM_SKIP_HOOKS", ",") == expected + + +HookSpecs = namedtuple("HookSpecs", ["command", "hooks", "fixtures"]) + +KNOWN_COMMAND_HOOKS = ( + ("add", "add requests", ("pre_lock", "post_lock"), ["working_set"]), + ("build", "build", ("pre_build", "post_build"), []), + ("init", "init --non-interactive", ("post_init",), []), + ( + "install", + "install", + ("pre_install", "post_install", "pre_lock", "post_lock"), + ["repository"], + ), + ("lock", "lock", ("pre_lock", "post_lock"), []), + ( + "publish", + "publish", + ("pre_publish", "pre_build", "post_build", "post_publish"), + ["mock_publish"], + ), + ("remove", "remove requests", ("pre_lock", "post_lock"), ["lock"]), + ("sync", "sync", ("pre_install", "post_install"), ["lock"]), + ("update", "update", ("pre_install", "post_install", "pre_lock", "post_lock"), []), + ("use", "use -f 3.7", ("post_use",), []), +) + +parametrize_with_commands = pytest.mark.parametrize( + "specs", + [ + pytest.param(HookSpecs(command, hooks, fixtures), id=id) + for id, command, hooks, fixtures in KNOWN_COMMAND_HOOKS + ], +) + +parametrize_with_hooks = pytest.mark.parametrize( + "specs,hook", + [ + pytest.param(HookSpecs(command, hooks, fixtures), hook, id=f"{id}-{hook}") + for id, command, hooks, fixtures in KNOWN_COMMAND_HOOKS + for hook in hooks + ], +) + + +@pytest.fixture +def hooked_project(project, capfd, specs, request): + project.tool_settings["scripts"] = { + hook: f"python -c \"print('{hook} CALLED')\"" for hook in KNOWN_HOOKS + } + project.write_pyproject() + for fixture in specs.fixtures: + request.getfixturevalue(fixture) + capfd.readouterr() + return project + + +@pytest.fixture +def lock(project, capfd): + project.add_dependencies({"requests": parse_requirement("requests")}) + actions.do_lock(project) + capfd.readouterr() + + +@parametrize_with_commands +def test_hooks(hooked_project, invoke, capfd, specs: HookSpecs): + invoke(shlex.split(specs.command), strict=True, obj=hooked_project) + out, _ = capfd.readouterr() + for hook in specs.hooks: + assert f"{hook} CALLED" in out + + +@parametrize_with_hooks # Iterate over hooks as we need a clean slate for each run +def test_skip_option_from_signal( + hooked_project, invoke, capfd, specs: HookSpecs, hook: str +): + invoke( + [*shlex.split(specs.command), f"--skip={hook}"], strict=True, obj=hooked_project + ) + out, _ = capfd.readouterr() + assert f"{hook} CALLED" not in out + for known_hook in specs.hooks: + if known_hook != hook: + assert f"{known_hook} 
CALLED" in out + + +@parametrize_with_commands +@pytest.mark.parametrize("option", [":all", ":pre,:post"]) +def test_skip_all_option_from_signal( + hooked_project, invoke, capfd, specs: HookSpecs, option: str +): + invoke( + [*shlex.split(specs.command), f"--skip={option}"], + strict=True, + obj=hooked_project, + ) + out, _ = capfd.readouterr() + for hook in KNOWN_HOOKS: + assert f"{hook} CALLED" not in out + + +@parametrize_with_commands +@pytest.mark.parametrize("prefix", ["pre", "post"]) +def test_skip_pre_post_option_from_signal( + hooked_project, invoke, capfd, specs: HookSpecs, prefix: str +): + invoke( + [*shlex.split(specs.command), f"--skip=:{prefix}"], + strict=True, + obj=hooked_project, + ) + out, _ = capfd.readouterr() + for hook in specs.hooks: + if hook.startswith(prefix): + assert f"{hook} CALLED" not in out + else: + assert f"{hook} CALLED" in out diff --git a/tests/cli/test_install.py b/tests/cli/test_install.py index fdd8d9b1a5..c4945c6934 100644 --- a/tests/cli/test_install.py +++ b/tests/cli/test_install.py @@ -119,11 +119,12 @@ def test_sync_without_self(project, working_set): def test_sync_with_index_change(project, index): + project.project_config["pypi.url"] = "https://my.pypi.org/simple" project.meta["requires-python"] = ">=3.6" project.meta["dependencies"] = ["future-fstrings"] project.write_pyproject() index[ - "future-fstrings" + "/simple/future-fstrings/" ] = """ @@ -141,7 +142,7 @@ def test_sync_with_index_change(project, index): "sha256:90e49598b553d8746c4dc7d9442e0359d038c3039d802c91c0a55505da318c63" ] # Mimic the CDN inconsistences of PyPI simple index. See issues/596. - del index["future-fstrings"] + del index["/simple/future-fstrings/"] actions.do_sync(project, no_self=True) diff --git a/tests/cli/test_lock.py b/tests/cli/test_lock.py index e3eb48e50a..720c8c0b41 100644 --- a/tests/cli/test_lock.py +++ b/tests/cli/test_lock.py @@ -1,3 +1,5 @@ +from unittest.mock import ANY + import pytest from pdm.cli import actions @@ -7,7 +9,7 @@ def test_lock_command(project, invoke, mocker): m = mocker.patch.object(actions, "do_lock") invoke(["lock"], obj=project) - m.assert_called_with(project, refresh=False) + m.assert_called_with(project, refresh=False, hooks=ANY) @pytest.mark.usefixtures("repository") @@ -64,3 +66,15 @@ def test_innovations_with_specified_lockfile(invoke, project, working_set): assert "requests" in locked invoke(["sync", "--lockfile", lockfile], strict=True, obj=project) assert "requests" in working_set + + +@pytest.mark.usefixtures("repository", "vcs") +def test_skip_editable_dependencies_in_metadata(project, capsys): + project.meta["dependencies"] = [ + "-e git+https://github.com/test-root/demo.git@1234567890abcdef#egg=demo" + ] + project.write_pyproject() + actions.do_lock(project) + _, err = capsys.readouterr() + assert "WARNING: Skipping editable dependency" in err + assert not project.locked_repository.all_candidates diff --git a/tests/cli/test_others.py b/tests/cli/test_others.py index f237328563..142a4da55f 100644 --- a/tests/cli/test_others.py +++ b/tests/cli/test_others.py @@ -1,3 +1,5 @@ +from unittest.mock import ANY + import pytest from pdm.cli import actions @@ -38,7 +40,7 @@ def test_init_validate_python_requires(project_no_init): def test_help_option(invoke): result = invoke(["--help"]) - assert "PDM - Python Development Master" in result.output + assert "Usage: pdm [-h]" in result.output def test_info_command(project, invoke): @@ -93,6 +95,7 @@ def test_init_command(project_no_init, invoke, mocker): author="Testing", 
email="me@example.org", python_requires=f">={python_version}", + hooks=ANY, ) @@ -118,6 +121,7 @@ def test_init_command_library(project_no_init, invoke, mocker): author="Testing", email="me@example.org", python_requires=f">={python_version}", + hooks=ANY, ) @@ -127,9 +131,16 @@ def test_init_non_interactive(project_no_init, invoke, mocker): return_value=("Testing", "me@example.org"), ) do_init = mocker.patch.object(actions, "do_init") + do_use = mocker.patch.object(actions, "do_use") result = invoke(["init", "-n"], obj=project_no_init) assert result.exit_code == 0 python_version = f"{project_no_init.python.major}.{project_no_init.python.minor}" + do_use.assert_called_once_with( + project_no_init, + ANY, + True, + hooks=ANY, + ) do_init.assert_called_with( project_no_init, name="", @@ -139,6 +150,7 @@ def test_init_non_interactive(project_no_init, invoke, mocker): author="Testing", email="me@example.org", python_requires=f">={python_version}", + hooks=ANY, ) @@ -209,7 +221,6 @@ def test_export_to_requirements_txt(invoke, fixture_project): requirements_pyproject = project.root / "requirements.ini" result = invoke(["export"], obj=project) - print("==========OUTPUT=============", result.output.strip(), result.stderr.strip()) assert result.exit_code == 0 assert result.output.strip() == requirements_txt.read_text().strip() @@ -246,7 +257,8 @@ def test_completion_command(invoke): @pytest.mark.network -def test_show_update_hint(invoke, project): +def test_show_update_hint(invoke, project, monkeypatch): + monkeypatch.delenv("PDM_CHECK_UPDATE", raising=False) prev_version = project.core.version try: project.core.version = "0.0.0" @@ -254,4 +266,4 @@ def test_show_update_hint(invoke, project): finally: project.core.version = prev_version assert "to upgrade." in r.stderr - assert "Run $ pdm config check_update false to disable the check." in r.stderr + assert "Run `pdm config check_update false` to disable the check." 
in r.stderr diff --git a/tests/cli/test_publish.py b/tests/cli/test_publish.py new file mode 100644 index 0000000000..b93c4f52a1 --- /dev/null +++ b/tests/cli/test_publish.py @@ -0,0 +1,137 @@ +import os +from argparse import Namespace + +import pytest + +from pdm.cli.commands.publish import Command as PublishCommand +from pdm.cli.commands.publish.package import PackageFile +from pdm.cli.commands.publish.repository import Repository +from tests import FIXTURES + +pytestmark = pytest.mark.usefixtures("mock_run_gpg") + + +@pytest.mark.parametrize( + "filename", + ["demo-0.0.1-py2.py3-none-any.whl", "demo-0.0.1.tar.gz", "demo-0.0.1.zip"], +) +def test_package_parse_metadata(filename): + fullpath = FIXTURES / "artifacts" / filename + package = PackageFile.from_filename(str(fullpath), None) + assert package.base_filename == filename + meta = package.metadata_dict + assert meta["name"] == "demo" + assert meta["version"] == "0.0.1" + assert all( + f"{hash_name}_digest" in meta for hash_name in ["md5", "sha256", "blake2_256"] + ) + + if filename.endswith(".whl"): + assert meta["pyversion"] == "py2.py3" + assert meta["filetype"] == "bdist_wheel" + else: + assert meta["pyversion"] == "source" + assert meta["filetype"] == "sdist" + + +def test_package_add_signature(tmp_path): + package = PackageFile.from_filename( + str(FIXTURES / "artifacts/demo-0.0.1-py2.py3-none-any.whl"), None + ) + tmp_path.joinpath("signature.asc").write_bytes(b"test gpg signature") + package.add_gpg_signature(str(tmp_path / "signature.asc"), "signature.asc") + assert package.gpg_signature == ("signature.asc", b"test gpg signature") + + +def test_package_call_gpg_sign(): + package = PackageFile.from_filename( + str(FIXTURES / "artifacts/demo-0.0.1-py2.py3-none-any.whl"), None + ) + try: + package.sign(None) + finally: + try: + os.unlink(package.filename + ".asc") + except OSError: + pass + assert package.gpg_signature == (package.base_filename + ".asc", b"fake signature") + + +def test_repository_get_release_urls(project): + package_files = [ + PackageFile.from_filename(str(FIXTURES / "artifacts" / fn), None) + for fn in [ + "demo-0.0.1-py2.py3-none-any.whl", + "demo-0.0.1.tar.gz", + "demo-0.0.1.zip", + ] + ] + repository = Repository(project, "https://upload.pypi.org/legacy/", None, None) + assert repository.get_release_urls(package_files) == { + "https://pypi.org/project/demo/0.0.1/" + } + + repository = Repository(project, "https://example.pypi.org/legacy/", None, None) + assert not repository.get_release_urls(package_files) + + +@pytest.mark.usefixtures("prepare_packages") +def test_publish_pick_up_asc_files(project, uploaded, invoke): + for p in list(project.root.joinpath("dist").iterdir()): + with open(str(p) + ".asc", "w") as f: + f.write("fake signature") + + invoke(["publish", "--no-build"], obj=project, strict=True) + # Test wheels are uploaded first + assert uploaded[0].base_filename.endswith(".whl") + for package in uploaded: + assert package.gpg_signature == ( + package.base_filename + ".asc", + b"fake signature", + ) + + +@pytest.mark.usefixtures("prepare_packages") +def test_publish_package_with_signature(project, uploaded, invoke): + invoke(["publish", "--no-build", "-S"], obj=project, strict=True) + for package in uploaded: + assert package.gpg_signature == ( + package.base_filename + ".asc", + b"fake signature", + ) + + +@pytest.mark.usefixtures("local_finder") +def test_publish_and_build_in_one_run(fixture_project, invoke, mock_pypi): + project = fixture_project("demo-module") + result = invoke(["publish"], 
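+        # without --no-build, publish should build the sdist and wheel first
+        # and then upload them, which the output assertions below rely on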
obj=project, strict=True).output + + mock_pypi.assert_called() + assert "Uploading demo_module-0.1.0-py3-none-any.whl" in result + assert "Uploading demo-module-0.1.0.tar.gz" in result + assert "https://pypi.org/project/demo-module/0.1.0/" in result + + +def test_publish_cli_args_and_env_var_precedence(project, monkeypatch): + repo = PublishCommand.get_repository( + project, Namespace(repository=None, username="foo", password="bar") + ) + assert repo.url == "https://upload.pypi.org/legacy/" + assert repo.session.auth == ("foo", "bar") + + with monkeypatch.context() as m: + m.setenv("PDM_PUBLISH_USERNAME", "bar") + m.setenv("PDM_PUBLISH_PASSWORD", "secret") + m.setenv("PDM_PUBLISH_REPO", "testpypi") + + repo = PublishCommand.get_repository( + project, Namespace(repository=None, username=None, password=None) + ) + assert repo.url == "https://test.pypi.org/legacy/" + assert repo.session.auth == ("bar", "secret") + + repo = PublishCommand.get_repository( + project, Namespace(repository="pypi", username="foo", password=None) + ) + assert repo.url == "https://upload.pypi.org/legacy/" + assert repo.session.auth == ("foo", "secret") diff --git a/tests/cli/test_remove.py b/tests/cli/test_remove.py index 93b60e24a1..7bfee3cdfd 100644 --- a/tests/cli/test_remove.py +++ b/tests/cli/test_remove.py @@ -12,22 +12,20 @@ def test_remove_command(project, invoke, mocker): @pytest.mark.usefixtures("repository", "working_set", "vcs") -def test_remove_both_normal_and_editable_packages(project, is_dev): +def test_remove_editable_packages_while_keeping_normal(project): project.environment.python_requires = PySpecSet(">=3.6") - actions.do_add(project, is_dev, packages=["demo"]) + actions.do_add(project, packages=["demo"]) actions.do_add( project, - is_dev, + True, editables=["git+https://github.com/test-root/demo.git#egg=demo"], ) - group = ( - project.tool_settings["dev-dependencies"]["dev"] - if is_dev - else project.meta["dependencies"] - ) - actions.do_remove(project, is_dev, packages=["demo"]) - assert not group - assert "demo" not in project.locked_repository.all_candidates + dev_group = project.tool_settings["dev-dependencies"]["dev"] + default_group = project.meta["dependencies"] + actions.do_remove(project, True, packages=["demo"]) + assert not dev_group + assert len(default_group) == 1 + assert not project.locked_repository.all_candidates["demo"].req.editable @pytest.mark.usefixtures("repository") diff --git a/tests/cli/test_run.py b/tests/cli/test_run.py index a8d1641e4e..8a5ce41ef0 100644 --- a/tests/cli/test_run.py +++ b/tests/cli/test_run.py @@ -5,10 +5,29 @@ from pathlib import Path from tempfile import TemporaryDirectory +import pytest + +from pdm import termui +from pdm.cli import actions from pdm.cli.actions import PEP582_PATH from pdm.utils import cd +@pytest.fixture +def _args(project): + (project.root / "args.py").write_text( + textwrap.dedent( + """ + import os + import sys + name = sys.argv[1] + args = ", ".join(sys.argv[2:]) + print(f"{name} CALLED with {args}" if args else f"{name} CALLED") + """ + ) + ) + + def test_pep582_launcher_for_python_interpreter(project, local_finder, invoke): project.root.joinpath("main.py").write_text( "import first;print(first.first([0, False, 1, 2]))\n" @@ -28,25 +47,42 @@ def test_auto_isolate_site_packages(project, invoke): env = os.environ.copy() env.update({"PYTHONPATH": PEP582_PATH}) proc = subprocess.run( - [str(project.python.executable), "-c", "import click"], env=env + [str(project.python.executable), "-c", "import sys;print(sys.path, sep='\\n')"], + 
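+        # a plain interpreter invocation should still see site-packages on
+        # sys.path even with the PEP 582 loader on PYTHONPATH, unlike the
+        # isolated `pdm run` invocation asserted further down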
env=env, + capture_output=True, + text=True, + cwd=str(project.root), + check=True, ) - assert proc.returncode == 0 + assert any("site-packages" in path for path in proc.stdout.splitlines()) - result = invoke(["run", "python", "-c", "import click"], obj=project) - if os.name != "nt": # os.environ handling seems problematic on Windows - assert result.exit_code != 0 + result = invoke( + ["run", "python", "-c", "import sys;print(sys.path, sep='\\n')"], + obj=project, + strict=True, + ) + assert not any("site-packages" in path for path in result.stdout.splitlines()) def test_run_with_site_packages(project, invoke): project.tool_settings["scripts"] = { - "foo": {"cmd": "python -c 'import click'", "site_packages": True} + "foo": { + "cmd": ["python", "-c", "import sys;print(sys.path, sep='\\n')"], + "site_packages": True, + } } project.write_pyproject() result = invoke( - ["run", "--site-packages", "python", "-c", "import click"], obj=project + [ + "run", + "--site-packages", + "python", + "-c", + "import sys;print(sys.path, sep='\\n')", + ], + obj=project, ) assert result.exit_code == 0 - result = invoke(["run", "foo"], obj=project) assert result.exit_code == 0 @@ -225,19 +261,35 @@ def test_run_script_override_global_env(project, invoke, capfd): def test_run_show_list_of_scripts(project, invoke): project.tool_settings["scripts"] = { + "test_composite": {"composite": ["test_cmd", "test_script", "test_shell"]}, "test_cmd": "flask db upgrade", + "test_multi": """\ + I am a multilines + command + """, "test_script": {"call": "test_script:main", "help": "call a python function"}, "test_shell": {"shell": "echo $FOO", "help": "shell command"}, } project.write_pyproject() result = invoke(["run", "--list"], obj=project) - result_lines = result.output.splitlines()[2:] - assert result_lines[0].strip() == "test_cmd cmd flask db upgrade" + result_lines = result.output.splitlines()[3:] + assert ( + result_lines[0][1:-1].strip() == "test_cmd │ cmd │ flask db upgrade" + ) + sep = termui.Emoji.ARROW_SEPARATOR + assert ( + result_lines[1][1:-1].strip() + == f"test_composite │ composite │ test_cmd {sep} test_script {sep} test_shell" + ) assert ( - result_lines[1].strip() - == "test_script call test_script:main call a python function" + result_lines[2][1:-1].strip() + == f"test_multi │ cmd │ I am a multilines{termui.Emoji.ELLIPSIS}" ) - assert result_lines[2].strip() == "test_shell shell echo $FOO shell command" + assert ( + result_lines[3][1:-1].strip() + == "test_script │ call │ call a python function" + ) + assert result_lines[4][1:-1].strip() == "test_shell │ shell │ shell command" def test_run_with_another_project_root(project, local_finder, invoke, capfd): @@ -251,8 +303,8 @@ def test_run_with_another_project_root(project, local_finder, invoke, capfd): capfd.readouterr() with cd(tmp_dir): ret = invoke(["run", "-p", str(project.root), "python", "main.py"]) - assert ret.exit_code == 0 - out, _ = capfd.readouterr() + out, err = capfd.readouterr() + assert ret.exit_code == 0, err assert out.strip() == "1" @@ -296,42 +348,254 @@ def test_run_with_patched_sysconfig(project, invoke, capfd): assert "__pypackages__" in out["purelib"] -def test_pre_and_post_hooks(project, invoke, capfd): +def test_run_composite(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "first": "python echo.py First", + "second": "python echo.py Second", + "test": {"composite": ["first", "second"]}, + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = 
capfd.readouterr() + assert "First CALLED" in out + assert "Second CALLED" in out + + +def test_composite_stops_on_first_failure(project, invoke, capfd): project.tool_settings["scripts"] = { - "pre_install": "python -c \"print('PRE INSTALL CALLED')\"", - "post_install": "python -c \"print('POST INSTALL CALLED')\"", + "first": {"cmd": ["python", "-c", "print('First CALLED')"]}, + "fail": "python -c 'raise Exception'", + "second": "echo 'Second CALLED'", + "test": {"composite": ["first", "fail", "second"]}, } project.write_pyproject() - invoke(["install"], strict=True, obj=project) + capfd.readouterr() + result = invoke(["run", "test"], obj=project) + assert result.exit_code == 1 + out, _ = capfd.readouterr() + assert "First CALLED" in out + assert "Second CALLED" not in out + + +def test_composite_inherit_env(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "first": { + "cmd": "python echo.py First VAR", + "env": {"VAR": "42"}, + }, + "second": { + "cmd": "python echo.py Second VAR", + "env": {"VAR": "42"}, + }, + "test": {"composite": ["first", "second"], "env": {"VAR": "overriden"}}, + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) out, _ = capfd.readouterr() - assert "PRE INSTALL CALLED" in out - assert "POST INSTALL CALLED" in out + assert "First CALLED with VAR=overriden" in out + assert "Second CALLED with VAR=overriden" in out -def test_pre_script_fail_fast(project, invoke, capfd, mocker): +def test_composite_fail_on_first_missing_task(project, invoke, capfd, _echo): project.tool_settings["scripts"] = { - "pre_install": "python -c \"print('PRE INSTALL CALLED'); exit(1)\"", - "post_install": "python -c \"print('POST INSTALL CALLED')\"", + "first": "python echo.py First", + "second": "python echo.py Second", + "test": {"composite": ["first", "fail", "second"]}, } project.write_pyproject() - synchronize = mocker.patch("pdm.installers.synchronizers.Synchronizer.synchronize") - result = invoke(["install"], obj=project) + capfd.readouterr() + result = invoke(["run", "test"], obj=project) assert result.exit_code == 1 out, _ = capfd.readouterr() - assert "PRE INSTALL CALLED" in out - assert "POST INSTALL CALLED" not in out - synchronize.assert_not_called() + assert "First CALLED" in out + assert "Second CALLED" not in out + + +def test_composite_runs_all_hooks(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "test": {"composite": ["first", "second"]}, + "pre_test": "python echo.py Pre-Test", + "post_test": "python echo.py Post-Test", + "first": "python echo.py First", + "pre_first": "python echo.py Pre-First", + "second": "python echo.py Second", + "post_second": "python echo.py Post-Second", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Test CALLED" in out + assert "Pre-First CALLED" in out + assert "First CALLED" in out + assert "Second CALLED" in out + assert "Post-Second CALLED" in out + assert "Post-Test CALLED" in out + + +def test_composite_pass_parameters_to_subtasks(project, invoke, capfd, _args): + project.tool_settings["scripts"] = { + "test": {"composite": ["first", "second"]}, + "pre_test": "python args.py Pre-Test", + "post_test": "python args.py Post-Test", + "first": "python args.py First", + "pre_first": "python args.py Pre-First", + "second": "python args.py Second", + "post_second": "python args.py Post-Second", + } + project.write_pyproject() + capfd.readouterr() + 
invoke(["run", "test", "param=value"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Test CALLED" in out + assert "Pre-First CALLED" in out + assert "First CALLED with param=value" in out + assert "Second CALLED with param=value" in out + assert "Post-Second CALLED" in out + assert "Post-Test CALLED" in out + + +def test_composite_can_pass_parameters(project, invoke, capfd, _args): + project.tool_settings["scripts"] = { + "test": {"composite": ["first param=first", "second param=second"]}, + "pre_test": "python args.py Pre-Test", + "post_test": "python args.py Post-Test", + "first": "python args.py First", + "pre_first": "python args.py Pre-First", + "second": "python args.py Second", + "post_second": "python args.py Post-Second", + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Test CALLED" in out + assert "Pre-First CALLED" in out + assert "First CALLED with param=first" in out + assert "Second CALLED with param=second" in out + assert "Post-Second CALLED" in out + assert "Post-Test CALLED" in out + + +def test_composite_hooks_inherit_env(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "pre_task": {"cmd": "python echo.py Pre-Task VAR", "env": {"VAR": "42"}}, + "task": "python echo.py Task", + "post_task": {"cmd": "python echo.py Post-Task VAR", "env": {"VAR": "42"}}, + "test": {"composite": ["task"], "env": {"VAR": "overriden"}}, + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Task CALLED with VAR=overriden" in out + assert "Task CALLED" in out + assert "Post-Task CALLED with VAR=overriden" in out + + +def test_composite_inherit_env_in_cascade(project, invoke, capfd, _echo): + project.tool_settings["scripts"] = { + "_": {"env": {"FOO": "BAR", "TIK": "TOK"}}, + "pre_task": { + "cmd": "python echo.py Pre-Task VAR FOO TIK", + "env": {"VAR": "42", "FOO": "foobar"}, + }, + "task": { + "cmd": "python echo.py Task VAR FOO TIK", + "env": {"VAR": "42", "FOO": "foobar"}, + }, + "post_task": { + "cmd": "python echo.py Post-Task VAR FOO TIK", + "env": {"VAR": "42", "FOO": "foobar"}, + }, + "test": {"composite": ["task"], "env": {"VAR": "overriden"}}, + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Pre-Task CALLED with VAR=overriden FOO=foobar TIK=TOK" in out + assert "Task CALLED with VAR=overriden FOO=foobar TIK=TOK" in out + assert "Post-Task CALLED with VAR=overriden FOO=foobar TIK=TOK" in out -def test_pre_and_post_scripts(project, invoke, capfd): +def test_composite_inherit_dotfile(project, invoke, capfd, _echo): + (project.root / ".env").write_text("VAR=42") + (project.root / "override.env").write_text("VAR=overriden") project.tool_settings["scripts"] = { - "pre_test": "python -c \"print('PRE test CALLED')\"", - "test": "python -c \"print('IN test CALLED')\"", - "post_test": "python -c \"print('POST test CALLED')\"", + "pre_task": {"cmd": "python echo.py Pre-Task VAR", "env_file": ".env"}, + "task": {"cmd": "python echo.py Task VAR", "env_file": ".env"}, + "post_task": {"cmd": "python echo.py Post-Task VAR", "env_file": ".env"}, + "test": {"composite": ["task"], "env_file": "override.env"}, } project.write_pyproject() + capfd.readouterr() invoke(["run", "test"], strict=True, obj=project) out, _ = capfd.readouterr() - assert "PRE test CALLED" in out - 
assert "IN test CALLED" in out - assert "POST test CALLED" in out + assert "Pre-Task CALLED with VAR=overriden" in out + assert "Task CALLED with VAR=overriden" in out + assert "Post-Task CALLED with VAR=overriden" in out + + +def test_composite_can_have_commands(project, invoke, capfd): + project.tool_settings["scripts"] = { + "task": {"cmd": ["python", "-c", 'print("Task CALLED")']}, + "test": {"composite": ["task", "python -c 'print(\"Command CALLED\")'"]}, + } + project.write_pyproject() + capfd.readouterr() + invoke(["run", "-v", "test"], strict=True, obj=project) + out, _ = capfd.readouterr() + assert "Task CALLED" in out + assert "Command CALLED" in out + + +def test_run_shortcut(project, invoke, capfd): + project.tool_settings["scripts"] = { + "test": "echo 'Everything is fine'", + } + project.write_pyproject() + capfd.readouterr() + result = invoke(["test"], obj=project, strict=True) + assert result.exit_code == 0 + out, _ = capfd.readouterr() + assert "Everything is fine" in out + + +def test_run_shortcuts_dont_override_commands(project, invoke, capfd, mocker): + do_lock = mocker.patch.object(actions, "do_lock") + do_sync = mocker.patch.object(actions, "do_sync") + project.tool_settings["scripts"] = { + "install": "echo 'Should not run'", + } + project.write_pyproject() + capfd.readouterr() + result = invoke(["install"], obj=project, strict=True) + assert result.exit_code == 0 + out, _ = capfd.readouterr() + assert "Should not run" not in out + do_lock.assert_called_once() + do_sync.assert_called_once() + + +def test_run_shortcut_fail_with_usage_if_script_not_found(project, invoke): + result = invoke(["whatever"], obj=project) + assert result.exit_code != 0 + assert "Command unknown: whatever" in result.stderr + assert "Usage" in result.stderr + + +@pytest.mark.parametrize( + "args", + [ + pytest.param([], id="no args"), + pytest.param(["-ko"], id="unknown param"), + pytest.param(["pip", "--version"], id="not an user script"), + ], +) +def test_empty_positionnal_args_still_display_usage(project, invoke, args): + result = invoke(args, obj=project) + assert result.exit_code != 0 + assert "Usage" in result.stderr diff --git a/tests/conftest.py b/tests/conftest.py index 61c40d7cfe..8f7af8981d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,22 +1,26 @@ +from __future__ import annotations + import collections +import functools import json import os -import re import shutil import sys from contextlib import contextmanager -from io import BytesIO +from dataclasses import dataclass +from io import BytesIO, StringIO from pathlib import Path -from typing import Callable, Dict, Iterable, List, Optional, Tuple -from urllib.parse import urlparse +from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple +from urllib.parse import unquote, urlparse import pytest -from click.testing import CliRunner -from pip._internal.vcs import versioncontrol -from pip._vendor import requests +import requests +from packaging.version import parse as parse_version +from unearth.vcs import Git, vcs_support from pdm._types import CandidateInfo from pdm.cli.actions import do_init, do_use +from pdm.cli.hooks import HookManager from pdm.core import Core from pdm.exceptions import CandidateInfoNotFound from pdm.models.candidates import Candidate @@ -27,11 +31,13 @@ filter_requirements_with_extras, parse_requirement, ) +from pdm.models.session import PDMSession from pdm.project.config import Config -from pdm.utils import get_finder, normalize_name +from pdm.project.core import Project 
+from pdm.utils import normalize_name, path_to_url from tests import FIXTURES -os.environ["CI"] = "1" +os.environ.update(CI="1", PDM_CHECK_UPDATE="0") @contextmanager @@ -45,19 +51,41 @@ def temp_environ(): class LocalFileAdapter(requests.adapters.BaseAdapter): - def __init__(self, base_path): + def __init__(self, aliases, overrides=None, strip_suffix=False): super().__init__() - self.base_path = base_path + self.aliases = sorted( + aliases.items(), key=lambda item: len(item[0]), reverse=True + ) + self.overrides = overrides if overrides is not None else {} + self.strip_suffix = strip_suffix self._opened_files = [] + def get_file_path(self, path): + for prefix, base_path in self.aliases: + if path.startswith(prefix): + file_path = base_path / path[len(prefix) :].lstrip("/") + if not self.strip_suffix: + return file_path + return next( + (p for p in file_path.parent.iterdir() if p.stem == file_path.name), + None, + ) + return None + def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): - file_path = self.base_path / urlparse(request.url).path.lstrip("/") + request_path = urlparse(request.url).path + file_path = self.get_file_path(request_path) response = requests.models.Response() response.url = request.url response.request = request - if not file_path.exists(): + if request_path in self.overrides: + response.status_code = 200 + response.reason = "OK" + response.raw = BytesIO(self.overrides[request_path]) + response.headers["Content-Type"] = "text/html" + elif file_path is None or not file_path.exists(): response.status_code = 404 response.reason = "Not Found" response.raw = BytesIO(b"Not Found") @@ -76,21 +104,18 @@ def close(self): self._opened_files.clear() -class MockVersionControl(versioncontrol.VersionControl): - def obtain(self, dest, url, verbosity=0): - url, _ = self.get_url_rev_options(url) - path = os.path.splitext(os.path.basename(urlparse(str(url)).path))[0] +class MockGit(Git): + def fetch_new(self, location, url, rev, args): + path = os.path.splitext(os.path.basename(unquote(urlparse(str(url)).path)))[0] mocked_path = FIXTURES / "projects" / path - shutil.copytree(mocked_path, dest) + shutil.copytree(mocked_path, location) - @classmethod - def get_revision(cls, location): + def get_revision(self, location: Path) -> str: return "1234567890abcdef" - def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: - if "@1234567890abcdef" in url: - return True - return super().is_immutable_rev_checkout(url, dest) + def is_immutable_revision(self, location, link) -> bool: + rev = self.get_url_and_rev_options(link)[1] + return rev == "1234567890abcdef" class _FakeLink: @@ -137,7 +162,11 @@ def get_hashes(self, candidate: Candidate) -> Optional[Dict[str, str]]: return {} def _find_candidates(self, requirement: Requirement) -> Iterable[Candidate]: - for version, candidate in self._pypi_data.get(requirement.key, {}).items(): + for version, candidate in sorted( + self._pypi_data.get(requirement.key, {}).items(), + key=lambda item: parse_version(item[0]), + reverse=True, + ): c = Candidate( requirement, name=requirement.project_name, @@ -220,18 +249,14 @@ def uninstall(dist): yield rv -def get_local_finder(*args, **kwargs): - finder = get_finder(*args, **kwargs) - finder.session.mount("http://fixtures.test/", LocalFileAdapter(FIXTURES)) - return finder - - -@pytest.fixture(autouse=True) -def pip_global_tempdir_manager(): - from pdm.models.pip_shims import global_tempdir_manager - - with global_tempdir_manager(): - yield +def 
get_pypi_session(*args, overrides=None, **kwargs): + session = PDMSession(*args, **kwargs) + session.mount("http://fixtures.test/", LocalFileAdapter({"/": FIXTURES})) + session.mount( + "https://my.pypi.org/", + LocalFileAdapter({"/simple": FIXTURES / "index"}, overrides, strip_suffix=True), + ) + return session def remove_pep582_path_from_pythonpath(pythonpath): @@ -251,7 +276,12 @@ def core(): @pytest.fixture() -def project_no_init(tmp_path, mocker, core): +def index(): + return {} + + +@pytest.fixture() +def project_no_init(tmp_path, mocker, core, index): test_home = tmp_path / ".pdm-home" test_home.mkdir(parents=True) test_home.joinpath("config.toml").write_text( @@ -262,11 +292,17 @@ def project_no_init(tmp_path, mocker, core): p = core.create_project( tmp_path, global_config=test_home.joinpath("config.toml").as_posix() ) - mocker.patch("pdm.utils.get_finder", get_local_finder) - mocker.patch("pdm.models.environment.get_finder", get_local_finder) + mocker.patch( + "pdm.models.environment.PDMSession", + functools.partial(get_pypi_session, overrides=index), + ) tmp_path.joinpath("caches").mkdir(parents=True) p.global_config["cache_dir"] = tmp_path.joinpath("caches").as_posix() - do_use(p, getattr(sys, "_base_executable", sys.executable)) + do_use( + p, + getattr(sys, "_base_executable", sys.executable), + HookManager(p, ["post_use"]), + ) with temp_environ(): os.environ.pop("VIRTUAL_ENV", None) os.environ.pop("CONDA_PREFIX", None) @@ -281,14 +317,23 @@ def project_no_init(tmp_path, mocker, core): @pytest.fixture() def local_finder(project_no_init, mocker): - return_value = ["--no-index", "--find-links", str(FIXTURES / "artifacts")] - mocker.patch("pdm.utils.prepare_pip_source_args", return_value=return_value) + artifacts_dir = str(FIXTURES / "artifacts") + return_value = ["--no-index", "--find-links", artifacts_dir] mocker.patch("pdm.builders.base.prepare_pip_source_args", return_value=return_value) + project_no_init.tool_settings["source"] = [ + { + "type": "find_links", + "verify_ssl": False, + "url": path_to_url(artifacts_dir), + } + ] + project_no_init.write_pyproject() @pytest.fixture() def project(project_no_init): - do_init(project_no_init, "test_project", "0.0.0") + hooks = HookManager(project_no_init, ["post_init"]) + do_init(project_no_init, "test_project", "0.0.0", hooks=hooks) # Clean the cached property project_no_init._environment = None return project_no_init @@ -325,16 +370,9 @@ def repository(project, mocker, local_finder): @pytest.fixture() -def vcs(mocker): - ret = MockVersionControl() - mocker.patch( - "pip._internal.vcs.versioncontrol.VcsSupport.get_backend", return_value=ret - ) - mocker.patch( - "pip._internal.vcs.versioncontrol.VcsSupport.get_backend_for_scheme", - return_value=ret, - ) - yield ret +def vcs(monkeypatch): + monkeypatch.setattr(vcs_support, "_registry", {"git": MockGit}) + return @pytest.fixture(params=[False, True]) @@ -347,14 +385,61 @@ def is_dev(request): return request.param +@dataclass +class RunResult: + exit_code: int + stdout: str + stderr: str + exception: Optional[Exception] = None + + @property + def output(self) -> str: + return self.stdout + + @property + def outputs(self) -> str: + return self.stdout + self.stderr + + def print(self): + print("# exit code:", self.exit_code) + print("# stdout:", self.stdout, sep="\n") + print("# stderr:", self.stderr, sep="\n") + + @pytest.fixture() -def invoke(core): - runner = CliRunner(mix_stderr=False) +def invoke(core, monkeypatch): + def caller( + args, + strict: bool = False, + input: 
Optional[str] = None,
+        obj: Optional[Project] = None,
+        env: Optional[Mapping[str, str]] = None,
+        **kwargs,
+    ):
+        __tracebackhide__ = True
+
+        stdin = StringIO(input)
+        stdout = StringIO()
+        stderr = StringIO()
+        exit_code = 0
+        exception = None
+
+        with monkeypatch.context() as m:
+            m.setattr("sys.stdin", stdin)
+            m.setattr("sys.stdout", stdout)
+            m.setattr("sys.stderr", stderr)
+            for key, value in (env or {}).items():
+                m.setenv(key, value)
+            try:
+                core.main(args, "pdm", obj=obj, **kwargs)
+            except SystemExit as e:
+                exit_code = e.code
+            except Exception as e:
+                exit_code = 1
+                exception = e
+
+        result = RunResult(exit_code, stdout.getvalue(), stderr.getvalue(), exception)
 
-    def caller(args, strict=False, **kwargs):
-        result = runner.invoke(
-            core, args, catch_exceptions=not strict, prog_name="pdm", **kwargs
-        )
         if strict and result.exit_code != 0:
             raise RuntimeError(
                 f"Call command {args} failed({result.exit_code}): {result.stderr}"
@@ -362,26 +447,3 @@ def caller(args, strict=False, **kwargs):
         return result
 
     return caller
-
-
-@pytest.fixture()
-def index():
-    from pip._internal.index.collector import HTMLPage, LinkCollector
-
-    old_fetcher = LinkCollector.fetch_page
-    fake_index = {}
-
-    def fetch_page(self, location):
-        m = re.search(r"/simple/([^/]+)/?", location.url)
-        if not m:
-            return old_fetcher(self, location)
-        name = m.group(1)
-        if name not in fake_index:
-            fake_index[name] = (FIXTURES / f"index/{name}.html").read_bytes()
-        return HTMLPage(
-            fake_index[name], "utf-8", location.url, cache_link_parsing=False
-        )
-
-    LinkCollector.fetch_page = fetch_page
-    yield fake_index
-    LinkCollector.fetch_page = old_fetcher
diff --git a/tests/fixtures/artifacts/pdm_pep517-0.12.0-py3-none-any.whl b/tests/fixtures/artifacts/pdm_pep517-1.0.0-py3-none-any.whl
similarity index 86%
rename from tests/fixtures/artifacts/pdm_pep517-0.12.0-py3-none-any.whl
rename to tests/fixtures/artifacts/pdm_pep517-1.0.0-py3-none-any.whl
index 0f7021c910..a2359e05e3 100644
Binary files a/tests/fixtures/artifacts/pdm_pep517-0.12.0-py3-none-any.whl and b/tests/fixtures/artifacts/pdm_pep517-1.0.0-py3-none-any.whl differ
diff --git a/tests/fixtures/projects/poetry-demo/pyproject.toml b/tests/fixtures/projects/poetry-demo/pyproject.toml
index e38a5970ac..ea4e5f70b9 100644
--- a/tests/fixtures/projects/poetry-demo/pyproject.toml
+++ b/tests/fixtures/projects/poetry-demo/pyproject.toml
@@ -4,13 +4,17 @@ version = "0.1.0"
 authors = ["Thomas Kluyver <thomas@kluyver.me.uk>"]
 homepage = "https://github.com/takluyver/flit"
 license = "BSD-3-Clause"
-
+description = "A demo project for Poetry"
 classifiers = [
     "Intended Audience :: Developers",
     "Programming Language :: Python :: 3",
     "Topic :: Software Development :: Libraries :: Python Modules",
 ]
 
+packages = [
+    { include = "mylib.py" },
+]
+
 [tool.poetry.urls]
 Documentation = "https://flit.readthedocs.io/en/latest/"
diff --git a/pdm/_vendor/__init__.py b/tests/fixtures/projects/test-hatch-static/README.md
similarity index 100%
rename from pdm/_vendor/__init__.py
rename to tests/fixtures/projects/test-hatch-static/README.md
diff --git a/tests/fixtures/projects/test-hatch-static/pyproject.toml b/tests/fixtures/projects/test-hatch-static/pyproject.toml
new file mode 100644
index 0000000000..f69690c62a
--- /dev/null
+++ b/tests/fixtures/projects/test-hatch-static/pyproject.toml
@@ -0,0 +1,16 @@
+[build-system]
+requires = ["hatchling>=0.15.0"]
+build-backend = "hatchling.build"
+
+[project]
+name = "test-hatch"
+version = "0.1.0"
+description = "Test hatch project"
+readme = "README.md"
+license = "MIT"
+requires-python = ">=3.7"
+authors = [{ name = "John", email = "john@example.org" }]
+classifiers = [
+    "License :: OSI Approved :: MIT License",
+]
+dependencies = ["requests", "click"]
diff --git a/tests/fixtures/projects/test-setuptools/AUTHORS b/tests/fixtures/projects/test-setuptools/AUTHORS
new file mode 100644
index 0000000000..900c418024
--- /dev/null
+++ b/tests/fixtures/projects/test-setuptools/AUTHORS
@@ -0,0 +1 @@
+frostming
diff --git a/tests/fixtures/projects/test-setuptools/README.md b/tests/fixtures/projects/test-setuptools/README.md
new file mode 100644
index 0000000000..005ce84941
--- /dev/null
+++ b/tests/fixtures/projects/test-setuptools/README.md
@@ -0,0 +1 @@
+# My Module
diff --git a/tests/fixtures/projects/test-setuptools/mymodule.py b/tests/fixtures/projects/test-setuptools/mymodule.py
new file mode 100644
index 0000000000..3dc1f76bc6
--- /dev/null
+++ b/tests/fixtures/projects/test-setuptools/mymodule.py
@@ -0,0 +1 @@
+__version__ = "0.1.0"
diff --git a/tests/fixtures/projects/test-setuptools/setup.cfg b/tests/fixtures/projects/test-setuptools/setup.cfg
new file mode 100644
index 0000000000..8275a81b4b
--- /dev/null
+++ b/tests/fixtures/projects/test-setuptools/setup.cfg
@@ -0,0 +1,20 @@
+[metadata]
+name = mymodule
+description = A test module
+keywords = one, two
+classifiers =
+    Framework :: Django
+    Programming Language :: Python :: 3
+
+[options]
+zip_safe = False
+include_package_data = True
+python_requires = >=3.5
+package_dir = = src
+install_requires =
+    requests
+    importlib-metadata; python_version<"3.8"
+
+[options.entry_points]
+console_scripts =
+    mycli = mymodule:main
diff --git a/tests/fixtures/projects/test-setuptools/setup.py b/tests/fixtures/projects/test-setuptools/setup.py
new file mode 100644
index 0000000000..58d6436d46
--- /dev/null
+++ b/tests/fixtures/projects/test-setuptools/setup.py
@@ -0,0 +1,18 @@
+from setuptools import setup
+from mymodule import __version__
+
+with open("AUTHORS", "r") as f:
+    authors = f.read().strip()
+
+kwargs = {
+    "name": "mymodule",
+    "version": __version__,
+    "author": authors,
+}
+
+if 1 + 1 >= 2:
+    kwargs.update(license="MIT")
+
+
+if __name__ == "__main__":
+    setup(**kwargs)
diff --git a/tests/fixtures/pyproject-legacy.toml b/tests/fixtures/pyproject-legacy.toml
deleted file mode 100644
index dd5916f826..0000000000
--- a/tests/fixtures/pyproject-legacy.toml
+++ /dev/null
@@ -1,29 +0,0 @@
-[build-system]
-build-backend = "pdm.pep517.api"
-requires = ["pdm-pep517"]
-
-[tool]
-[tool.pdm]
-author = "frostming <mianghong@gmail.com>"
-description = ""
-homepage = ""
-license = "MIT"
-name = "demo-package"
-python_requires = ">=3.5"
-version = { from = "my_package/__init__.py" }
-readme = "README.md"
-
-extras = ["test"]
-
-[[tool.pdm.source]]
-url = "https://test.pypi.org/simple"
-verify_ssl = true
-name = "testpypi"
-
-[tool.pdm.dependencies]
-flask = "*"
-
-[tool.pdm.dev-dependencies]
-
-[tool.pdm.test-dependencies]
-pytest = "*"
diff --git a/tests/models/test_candidates.py b/tests/models/test_candidates.py
index 9e0bc9ea48..3e1f3ab011 100644
--- a/tests/models/test_candidates.py
+++ b/tests/models/test_candidates.py
@@ -1,12 +1,12 @@
 import shutil
-from pathlib import Path
 
 import pytest
+from unearth import Link
 
 from pdm.exceptions import ExtrasWarning
 from pdm.models.candidates import Candidate
-from pdm.models.pip_shims import Link, path_to_url
 from pdm.models.requirements import parse_requirement
+from pdm.utils import path_to_url
 from tests import FIXTURES
 
 
@@ -69,7 +69,7 @@ def 
test_parse_metadata_with_extras(project): ) candidate = Candidate(req) prepared = candidate.prepare(project.environment) - assert prepared.ireq.is_wheel + assert prepared.link.is_wheel assert sorted(prepared.get_dependencies_from_metadata()) == [ "pytest", 'requests; python_version >= "3.6"', @@ -83,7 +83,7 @@ def test_parse_remote_link_metadata(project): ) candidate = Candidate(req) prepared = candidate.prepare(project.environment) - assert prepared.ireq.is_wheel + assert prepared.link.is_wheel assert prepared.get_dependencies_from_metadata() == [ "idna", 'chardet; os_name == "nt"', @@ -99,7 +99,7 @@ def test_extras_warning(project, recwarn): ) candidate = Candidate(req) prepared = candidate.prepare(project.environment) - assert prepared.ireq.is_wheel + assert prepared.link.is_wheel assert prepared.get_dependencies_from_metadata() == [] warning = recwarn.pop(ExtrasWarning) assert str(warning.message) == "Extras not found for demo: [foo]" @@ -238,24 +238,26 @@ def test_vcs_candidate_in_subdirectory(project, is_editable): @pytest.mark.usefixtures("local_finder") def test_sdist_candidate_with_wheel_cache(project, mocker): file_link = Link(path_to_url((FIXTURES / "artifacts/demo-0.0.1.tar.gz").as_posix())) - built_path = (FIXTURES / "artifacts/demo-0.0.1-py2.py3-none-any.whl").as_posix() + built_path = FIXTURES / "artifacts/demo-0.0.1-py2.py3-none-any.whl" wheel_cache = project.make_wheel_cache() - cache_path = wheel_cache.get_path_for_link(file_link) - if not Path(cache_path).exists(): - Path(cache_path).mkdir(parents=True) + cache_path = wheel_cache.get_path_for_link( + file_link, project.environment.target_python + ) + if not cache_path.exists(): + cache_path.mkdir(parents=True) shutil.copy2(built_path, cache_path) req = parse_requirement(file_link.url) - downloader = mocker.patch("pdm.models.pip_shims.unpack_url") + downloader = mocker.patch("unearth.finder.unpack_link") prepared = Candidate(req).prepare(project.environment) - prepared.prepare_metadata() + prepared.metadata downloader.assert_not_called() - assert Path(prepared.wheel) == Path(cache_path) / Path(built_path).name + assert prepared.wheel == cache_path / built_path.name prepared.wheel = None builder = mocker.patch("pdm.builders.WheelBuilder.build") wheel = prepared.build() builder.assert_not_called() - assert Path(wheel) == Path(cache_path) / Path(built_path).name + assert wheel == cache_path / built_path.name @pytest.mark.usefixtures("vcs", "local_finder") @@ -264,7 +266,7 @@ def test_cache_vcs_immutable_revision(project): candidate = Candidate(req) wheel = candidate.prepare(project.environment).build() with pytest.raises(ValueError): - Path(wheel).relative_to(project.cache_dir) + wheel.relative_to(project.cache_dir) assert candidate.get_revision() == "1234567890abcdef" req = parse_requirement( @@ -272,12 +274,12 @@ def test_cache_vcs_immutable_revision(project): ) candidate = Candidate(req) wheel = candidate.prepare(project.environment).build() - assert Path(wheel).relative_to(project.cache_dir) + assert wheel.relative_to(project.cache_dir) assert candidate.get_revision() == "1234567890abcdef" # test the revision can be got correctly after cached prepared = Candidate(req).prepare(project.environment) - assert not prepared.ireq.source_dir + assert not prepared._source_dir assert prepared.revision == "1234567890abcdef" @@ -286,28 +288,35 @@ def test_cache_egg_info_sdist(project): req = parse_requirement("demo @ http://fixtures.test/artifacts/demo-0.0.1.tar.gz") candidate = Candidate(req) wheel = 
candidate.prepare(project.environment).build() - assert Path(wheel).relative_to(project.cache_dir) + assert wheel.relative_to(project.cache_dir) -def test_invalidate_incompatible_wheel_link(project, index): +def test_invalidate_incompatible_wheel_link(project): + project.project_config["pypi.url"] = "https://my.pypi.org/simple" req = parse_requirement("demo") - prepared = Candidate(req, name="demo", version="0.0.1").prepare(project.environment) + prepared = Candidate( + req, + name="demo", + version="0.0.1", + link=Link("http://fixtures.test/artifacts/demo-0.0.1-cp36-cp36m-win_amd64.whl"), + ).prepare(project.environment) prepared.obtain(True) assert ( - Path(prepared.wheel).name - == prepared.ireq.link.filename + prepared.wheel.name + == prepared.link.filename == "demo-0.0.1-cp36-cp36m-win_amd64.whl" ) prepared.obtain(False) assert ( - Path(prepared.wheel).name - == prepared.ireq.link.filename + prepared.wheel.name + == prepared.link.filename == "demo-0.0.1-py2.py3-none-any.whl" ) -def test_legacy_pep345_tag_link(project, index): +def test_legacy_pep345_tag_link(project): + project.project_config["pypi.url"] = "https://my.pypi.org/simple" req = parse_requirement("pep345-legacy") repo = project.get_repository() candidate = next(iter(repo.find_candidates(req))) @@ -325,3 +334,24 @@ def test_find_candidates_from_find_links(project): ] candidates = list(repo.find_candidates(parse_requirement("demo"))) assert len(candidates) == 2 + + +def test_parse_metadata_from_pep621(project, mocker): + builder = mocker.patch("pdm.builders.wheel.WheelBuilder.build") + req = parse_requirement( + f"test-hatch @ file://{FIXTURES.as_posix()}/projects/test-hatch-static" + ) + candidate = Candidate(req) + metadata = candidate.prepare(project.environment).metadata + assert sorted(metadata.requires) == ["click", "requests"] + builder.assert_not_called() + + +def test_parse_metadata_with_dynamic_fields(project, local_finder): + req = parse_requirement( + f"demo-package @ file://{FIXTURES.as_posix()}/projects/demo-src-package" + ) + candidate = Candidate(req) + metadata = candidate.prepare(project.environment).metadata + assert not metadata.requires + assert metadata.version == "0.1.0" diff --git a/tests/models/test_requirements.py b/tests/models/test_requirements.py index ed87e27e4d..678721987c 100644 --- a/tests/models/test_requirements.py +++ b/tests/models/test_requirements.py @@ -2,8 +2,8 @@ import pytest -from pdm.models.pip_shims import path_to_url from pdm.models.requirements import RequirementError, parse_requirement +from pdm.utils import path_to_url from tests import FIXTURES FILE_PREFIX = "file:///" if os.name == "nt" else "file://" @@ -62,7 +62,6 @@ @pytest.mark.parametrize("req, result", REQUIREMENTS) def test_convert_req_dict_to_req_line(req, result): r = parse_requirement(req) - assert r.as_ireq() result = result or req assert r.as_line() == result diff --git a/tests/resolver/test_resolve.py b/tests/resolver/test_resolve.py index e60caf1767..261069633c 100644 --- a/tests/resolver/test_resolve.py +++ b/tests/resolver/test_resolve.py @@ -97,6 +97,7 @@ def test_resolve_with_extras(resolve): f"{(FIXTURES / 'artifacts/demo-0.0.1.tar.gz').as_posix()}", f"{(FIXTURES / 'artifacts/demo-0.0.1-py2.py3-none-any.whl').as_posix()}", ], + ids=["sdist", "wheel"], ) def test_resolve_local_artifacts(resolve, requirement_line): result = resolve([requirement_line], ">=3.6") diff --git a/tests/test_formats.py b/tests/test_formats.py index 4f64d6e2d6..33f6f5a80a 100644 --- a/tests/test_formats.py +++ 
b/tests/test_formats.py @@ -1,7 +1,7 @@ import shutil from argparse import Namespace -from pdm.formats import flit, legacy, pipfile, poetry, requirements, setup_py +from pdm.formats import flit, pipfile, poetry, requirements, setup_py from pdm.models.requirements import parse_requirement from pdm.utils import cd from tests import FIXTURES @@ -35,7 +35,7 @@ def test_convert_requirements_file(project, is_dev): assert 'whoosh==2.7.4; sys_platform == "win32"' in group assert "-e git+https://github.com/pypa/pip.git@main#egg=pip" in group assert ( - "pep508-package@ git+https://github.com/techalchemy/test-project.git" + "pep508-package @ git+https://github.com/techalchemy/test-project.git" "@master#subdirectory=parent_folder/pep508-package" in group ) @@ -50,7 +50,7 @@ def test_convert_requirements_file_without_name(project, vcs): project, str(req_file), Namespace(dev=False, group=None) ) - assert result["dependencies"] == ["demo@ git+https://github.com/test-root/demo.git"] + assert result["dependencies"] == ["git+https://github.com/test-root/demo.git"] def test_convert_poetry(project): @@ -84,8 +84,9 @@ def test_convert_poetry(project): assert result["entry-points"]["blogtool.parsers"] == { ".rst": "some_module:SomeClass" } - assert settings["includes"] == ["lib/my_package", "tests", "CHANGELOG.md"] - assert settings["excludes"] == ["my_package/excluded.py"] + build = settings["build"] + assert build["includes"] == ["lib/my_package", "tests", "CHANGELOG.md"] + assert build["excludes"] == ["my_package/excluded.py"] def test_convert_flit(project): @@ -120,23 +121,9 @@ def test_convert_flit(project): result["entry-points"]["pygments.lexers"]["dogelang"] == "dogelang.lexer:DogeLexer" ) - assert settings["includes"] == ["doc/"] - assert settings["excludes"] == ["doc/*.html"] - - -def test_convert_legacy_format(project): - golden_file = FIXTURES / "pyproject-legacy.toml" - assert legacy.check_fingerprint(project, golden_file) - result, settings = legacy.convert(project, golden_file, None) - - assert result["name"] == "demo-package" - assert result["authors"][0] == {"name": "frostming", "email": "mianghong@gmail.com"} - assert result["license"] == {"text": "MIT"} - assert sorted(result["dynamic"]) == ["classifiers", "version"] - assert result["dependencies"] == ["flask"] - assert not result.get("dev-dependencies", {}).get("dev") - assert result["optional-dependencies"]["test"] == ["pytest"] - assert settings["source"][0]["url"] == "https://test.pypi.org/simple" + build = settings["build"] + assert build["includes"] == ["doc/"] + assert build["excludes"] == ["doc/*.html"] def test_export_setup_py(fixture_project): @@ -181,3 +168,23 @@ def test_export_replace_project_root(project): req = parse_requirement(f"./{artifact.name}") result = requirements.export(project, [req], Namespace(hashes=False)) assert "${PROJECT_ROOT}" not in result + + +def test_convert_setup_py_project(project): + golden_file = FIXTURES / "projects/test-setuptools/setup.py" + assert setup_py.check_fingerprint(project, golden_file) + result, settings = setup_py.convert(project, golden_file, Namespace()) + assert result == { + "name": "mymodule", + "version": "0.1.0", + "description": "A test module", + "keywords": ["one", "two"], + "readme": "README.md", + "authors": [{"name": "frostming"}], + "license": {"text": "MIT"}, + "classifiers": ["Framework :: Django", "Programming Language :: Python :: 3"], + "requires-python": ">=3.5", + "dependencies": ['importlib-metadata; python_version<"3.8"', "requests"], + "scripts": {"mycli": 
"mymodule:main"}, + } + assert settings == {"package-dir": "src"} diff --git a/tests/test_installer.py b/tests/test_installer.py index dfc26a2efb..2ff74ca77c 100644 --- a/tests/test_installer.py +++ b/tests/test_installer.py @@ -2,10 +2,10 @@ import os import pytest +from unearth import Link from pdm.installers import InstallManager from pdm.models.candidates import Candidate -from pdm.models.pip_shims import Link from pdm.models.requirements import parse_requirement from pdm.utils import fs_supports_symlink from tests import FIXTURES @@ -118,7 +118,7 @@ def test_install_wheel_with_cache(project, invoke): assert os.path.islink(os.path.join(lib_path, "future_fstrings.py")) assert os.path.islink(os.path.join(lib_path, "aaaaa_future_fstrings.pth")) else: - assert os.path.isfile(os.path.join(lib_path, "future_fstrings.pth")) + assert os.path.isfile(os.path.join(lib_path, "aaa_future_fstrings.pth")) assert os.path.isfile(os.path.join(lib_path, "aaaaa_future_fstrings.pth")) cache_path = project.cache("packages") / "future_fstrings-1.2.0-py2.py3-none-any" @@ -132,7 +132,7 @@ def test_install_wheel_with_cache(project, invoke): assert not os.path.exists(os.path.join(lib_path, "future_fstrings.py")) assert not os.path.exists(os.path.join(lib_path, "aaaaa_future_fstrings.pth")) else: - assert not os.path.isfile(os.path.join(lib_path, "future_fstrings.pth")) + assert not os.path.isfile(os.path.join(lib_path, "aaa_future_fstrings.pth")) assert not os.path.isfile(os.path.join(lib_path, "aaaaa_future_fstrings.pth")) assert not dist.read_text("direct_url.json") assert not cache_path.exists() diff --git a/tests/test_integration.py b/tests/test_integration.py index 25dbe5fee5..2667a7c2fd 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -5,7 +5,7 @@ @pytest.mark.integration @pytest.mark.network -@pytest.mark.parametrize("python_version", ["2.7", "3.6", "3.7", "3.8", "3.9"]) +@pytest.mark.parametrize("python_version", ["3.6", "3.7", "3.8", "3.9", "3.10"]) def test_basic_integration(python_version, core, tmp_path, invoke): """An e2e test case to ensure PDM works on all supported Python versions""" project = core.create_project(tmp_path) diff --git a/tests/test_plugin.py b/tests/test_plugin.py index b2720ea8b6..557b4293e8 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -1,13 +1,9 @@ from unittest import mock from pdm.cli.commands.base import BaseCommand +from pdm.compat import importlib_metadata from pdm.project.config import ConfigItem -try: - import importlib.metadata as importlib_metadata -except ModuleNotFoundError: - import importlib_metadata - class HelloCommand(BaseCommand): def add_arguments(self, parser) -> None: diff --git a/tests/test_signals.py b/tests/test_signals.py index b12feae206..2bfd29b8df 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -9,7 +9,7 @@ def test_post_init_signal(project_no_init, invoke): with signals.post_init.connected_to(mock_handler): result = invoke(["init"], input="\n\n\n\n\n\n", obj=project_no_init) assert result.exit_code == 0 - mock_handler.assert_called_once_with(project_no_init) + mock_handler.assert_called_once_with(project_no_init, hooks=mock.ANY) def test_post_lock_and_install_signals(project, working_set, repository): diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000000..c3eaa3c211 --- /dev/null +++ b/tox.ini @@ -0,0 +1,13 @@ +[tox] +envlist = py3{7,8,9,10,11}, lint +passenv = LD_PRELOAD +isolated_build = True + +[testenv] +groups = test +commands = test {posargs} + +[testenv:lint] +groups = 
lint +skip_install = true +commands = lint